diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9cba65a6de38..1a2614a57246 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -237,6 +237,10 @@ # PRLabel: %Digital Twins /sdk/digitaltwins/ @Aashish93-stack @johngallardo @Satya-Kolluri @sjiherzig +# ServiceLabel: %Discovery +# PRLabel: %Discovery +/sdk/discovery/ @oylawal @achocron + # PRLabel: %Document Intelligence /sdk/documentintelligence/ @bojunehsu @yungshinlintw @@ -340,7 +344,8 @@ /sdk/monitor/azure-monitor-ingestion/ @Azure/azure-sdk-write-monitor-data-plane # PRLabel: %Cognitive - Content Understanding -/sdk/contentunderstanding/ @bojunehsu @changjian-wang @chienyuanchang @yungshinlintw +/sdk/contentunderstanding/ @bojunehsu @changjian-wang @chienyuanchang @yungshinlintw + # PRLabel: %Monitor /sdk/monitor/azure-monitor-query/ @Azure/azure-sdk-write-monitor-query-logs diff --git a/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md b/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md new file mode 100644 index 000000000000..e561ba6823b5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md @@ -0,0 +1,7 @@ +# Release History + +## 1.0.0b1 (2026-03-09) + +### Other Changes + + - Initial version \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/LICENSE b/sdk/discovery/azure-mgmt-discovery/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/MANIFEST.in b/sdk/discovery/azure-mgmt-discovery/MANIFEST.in new file mode 100644 index 000000000000..9fcce60f53a6 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/MANIFEST.in @@ -0,0 +1,7 @@ +include *.md +include LICENSE +include azure/mgmt/discovery/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/mgmt/__init__.py diff --git a/sdk/discovery/azure-mgmt-discovery/README.md b/sdk/discovery/azure-mgmt-discovery/README.md new file mode 100644 index 000000000000..0d5eadb64512 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/README.md @@ -0,0 +1,61 @@ +# Microsoft Azure SDK for Python + +This is the Microsoft Azure Discovery Management Client Library. +This package has been tested with Python 3.9+. +For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). 
+ +## _Disclaimer_ + +_Support for Python 2.7 in Azure SDK Python packages ended on 01 January 2022. For more information and questions, please refer to https://github.com/Azure/azure-sdk-for-python/issues/20691_ + +## Getting started + +### Prerequisites + +- Python 3.9+ is required to use this package. +- [Azure subscription](https://azure.microsoft.com/free/) + +### Install the package + +```bash +pip install azure-mgmt-discovery +pip install azure-identity +``` + +### Authentication + +By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configuration of the following environment variables. + +- `AZURE_CLIENT_ID` for Azure client ID. +- `AZURE_TENANT_ID` for Azure tenant ID. +- `AZURE_CLIENT_SECRET` for Azure client secret. + +In addition, the Azure subscription ID can be configured via the `AZURE_SUBSCRIPTION_ID` environment variable. + +With the above configuration, the client can be authenticated with the following code: + +```python +from azure.identity import DefaultAzureCredential +from azure.mgmt.discovery import DiscoveryMgmtClient +import os + +sub_id = os.getenv("AZURE_SUBSCRIPTION_ID") +client = DiscoveryMgmtClient(credential=DefaultAzureCredential(), subscription_id=sub_id) +``` + +## Examples + +Code samples for this package can be found at: +- [Search Discovery Management](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com +- [Azure Python Mgmt SDK Samples Repo](https://aka.ms/azsdk/python/mgmt/samples) + + +## Troubleshooting + +## Next steps + +## Provide Feedback + +If you encounter any bugs or have suggestions, please file an issue in the +[Issues](https://github.com/Azure/azure-sdk-for-python/issues) +section of the project.
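The README's authentication snippet stops at client construction; as a quick follow-on, here is a minimal usage sketch of the `workspaces` operation group (its `list_by_subscription` operation appears in `apiview-properties.json` below). It assumes the list operation takes no required arguments and that `Workspace` exposes the standard ARM `name` property.

```python
import os

from azure.identity import DefaultAzureCredential
from azure.mgmt.discovery import DiscoveryMgmtClient

# Build the client exactly as in the README authentication example.
client = DiscoveryMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id=os.environ["AZURE_SUBSCRIPTION_ID"],
)

# list_by_subscription is assumed to return a paged iterable of Workspace
# resources; iterate and print the ARM resource name of each one.
for workspace in client.workspaces.list_by_subscription():
    print(workspace.name)
```

The same pattern applies to the other operation groups exposed on the client, such as `projects`, `supercomputers`, and `storage_containers`.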
diff --git a/sdk/discovery/azure-mgmt-discovery/_metadata.json b/sdk/discovery/azure-mgmt-discovery/_metadata.json new file mode 100644 index 000000000000..d62706705837 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/_metadata.json @@ -0,0 +1,10 @@ +{ + "apiVersion": "2026-02-01-preview", + "apiVersions": { + "Microsoft.Discovery": "2026-02-01-preview" + }, + "commit": "402bf21e472d87c1b3abcd0b2675a8b6c8a42700", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "typespec_src": "specification/discovery/Discovery.Management", + "emitterVersion": "0.60.2" +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/apiview-properties.json b/sdk/discovery/azure-mgmt-discovery/apiview-properties.json new file mode 100644 index 000000000000..f5370173cc3b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/apiview-properties.json @@ -0,0 +1,191 @@ +{ + "CrossLanguagePackageId": "Microsoft.Discovery", + "CrossLanguageDefinitionId": { + "azure.mgmt.discovery.models.StorageStore": "Microsoft.Discovery.StorageStore", + "azure.mgmt.discovery.models.AzureNetAppFilesStore": "Microsoft.Discovery.AzureNetAppFilesStore", + "azure.mgmt.discovery.models.AzureStorageBlobStore": "Microsoft.Discovery.AzureStorageBlobStore", + "azure.mgmt.discovery.models.Resource": "Azure.ResourceManager.CommonTypes.Resource", + "azure.mgmt.discovery.models.TrackedResource": "Azure.ResourceManager.CommonTypes.TrackedResource", + "azure.mgmt.discovery.models.Bookshelf": "Microsoft.Discovery.Bookshelf", + "azure.mgmt.discovery.models.BookshelfKeyVaultProperties": "Microsoft.Discovery.BookshelfKeyVaultProperties", + "azure.mgmt.discovery.models.ProxyResource": "Azure.ResourceManager.CommonTypes.ProxyResource", + "azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection": "Microsoft.Discovery.BookshelfPrivateEndpointConnection", + "azure.mgmt.discovery.models.BookshelfPrivateLinkResource": "Microsoft.Discovery.BookshelfPrivateLinkResource", + "azure.mgmt.discovery.models.BookshelfProperties": "Microsoft.Discovery.BookshelfProperties", + "azure.mgmt.discovery.models.ChatModelDeployment": "Microsoft.Discovery.ChatModelDeployment", + "azure.mgmt.discovery.models.ChatModelDeploymentProperties": "Microsoft.Discovery.ChatModelDeploymentProperties", + "azure.mgmt.discovery.models.ErrorAdditionalInfo": "Azure.ResourceManager.CommonTypes.ErrorAdditionalInfo", + "azure.mgmt.discovery.models.ErrorDetail": "Azure.ResourceManager.CommonTypes.ErrorDetail", + "azure.mgmt.discovery.models.ErrorResponse": "Azure.ResourceManager.CommonTypes.ErrorResponse", + "azure.mgmt.discovery.models.Identity": "Microsoft.Discovery.Identity", + "azure.mgmt.discovery.models.KeyVaultProperties": "Microsoft.Discovery.KeyVaultProperties", + "azure.mgmt.discovery.models.MoboBrokerResource": "Azure.ResourceManager.CommonTypes.MoboBrokerResource", + "azure.mgmt.discovery.models.NodePool": "Microsoft.Discovery.NodePool", + "azure.mgmt.discovery.models.NodePoolProperties": "Microsoft.Discovery.NodePoolProperties", + "azure.mgmt.discovery.models.Operation": "Azure.ResourceManager.CommonTypes.Operation", + "azure.mgmt.discovery.models.OperationDisplay": "Azure.ResourceManager.CommonTypes.OperationDisplay", + "azure.mgmt.discovery.models.PrivateEndpoint": "Azure.ResourceManager.CommonTypes.PrivateEndpoint", + "azure.mgmt.discovery.models.PrivateEndpointConnection": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnection", + "azure.mgmt.discovery.models.PrivateEndpointConnectionProperties": 
"Azure.ResourceManager.CommonTypes.PrivateEndpointConnectionProperties", + "azure.mgmt.discovery.models.PrivateLinkResourceProperties": "Azure.ResourceManager.CommonTypes.PrivateLinkResourceProperties", + "azure.mgmt.discovery.models.PrivateLinkServiceConnectionState": "Azure.ResourceManager.CommonTypes.PrivateLinkServiceConnectionState", + "azure.mgmt.discovery.models.Project": "Microsoft.Discovery.Project", + "azure.mgmt.discovery.models.ProjectProperties": "Microsoft.Discovery.ProjectProperties", + "azure.mgmt.discovery.models.ProjectSettings": "Microsoft.Discovery.ProjectSettings", + "azure.mgmt.discovery.models.StorageAsset": "Microsoft.Discovery.StorageAsset", + "azure.mgmt.discovery.models.StorageAssetProperties": "Microsoft.Discovery.StorageAssetProperties", + "azure.mgmt.discovery.models.StorageContainer": "Microsoft.Discovery.StorageContainer", + "azure.mgmt.discovery.models.StorageContainerProperties": "Microsoft.Discovery.StorageContainerProperties", + "azure.mgmt.discovery.models.Supercomputer": "Microsoft.Discovery.Supercomputer", + "azure.mgmt.discovery.models.SupercomputerIdentities": "Microsoft.Discovery.SupercomputerIdentities", + "azure.mgmt.discovery.models.SupercomputerProperties": "Microsoft.Discovery.SupercomputerProperties", + "azure.mgmt.discovery.models.SystemData": "Azure.ResourceManager.CommonTypes.SystemData", + "azure.mgmt.discovery.models.Tool": "Microsoft.Discovery.Tool", + "azure.mgmt.discovery.models.ToolProperties": "Microsoft.Discovery.ToolProperties", + "azure.mgmt.discovery.models.UserAssignedIdentity": "Azure.ResourceManager.CommonTypes.UserAssignedIdentity", + "azure.mgmt.discovery.models.WithMoboBrokerResources": "Microsoft.Discovery.WithMoboBrokerResources", + "azure.mgmt.discovery.models.Workspace": "Microsoft.Discovery.Workspace", + "azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection": "Microsoft.Discovery.WorkspacePrivateEndpointConnection", + "azure.mgmt.discovery.models.WorkspacePrivateLinkResource": "Microsoft.Discovery.WorkspacePrivateLinkResource", + "azure.mgmt.discovery.models.WorkspaceProperties": "Microsoft.Discovery.WorkspaceProperties", + "azure.mgmt.discovery.models.Origin": "Azure.ResourceManager.CommonTypes.Origin", + "azure.mgmt.discovery.models.ActionType": "Azure.ResourceManager.CommonTypes.ActionType", + "azure.mgmt.discovery.models.CreatedByType": "Azure.ResourceManager.CommonTypes.createdByType", + "azure.mgmt.discovery.models.ProvisioningState": "Microsoft.Discovery.ProvisioningState", + "azure.mgmt.discovery.models.CustomerManagedKeys": "Microsoft.Discovery.CustomerManagedKeys", + "azure.mgmt.discovery.models.PrivateEndpointServiceConnectionStatus": "Azure.ResourceManager.CommonTypes.PrivateEndpointServiceConnectionStatus", + "azure.mgmt.discovery.models.PrivateEndpointConnectionProvisioningState": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnectionProvisioningState", + "azure.mgmt.discovery.models.PublicNetworkAccess": "Microsoft.Discovery.PublicNetworkAccess", + "azure.mgmt.discovery.models.VmSize": "Microsoft.Discovery.VmSize", + "azure.mgmt.discovery.models.ScaleSetPriority": "Microsoft.Discovery.ScaleSetPriority", + "azure.mgmt.discovery.models.NetworkEgressType": "Microsoft.Discovery.NetworkEgressType", + "azure.mgmt.discovery.models.SystemSku": "Microsoft.Discovery.SystemSku", + "azure.mgmt.discovery.models.StorageStoreType": "Microsoft.Discovery.StorageStoreType", + "azure.mgmt.discovery.operations.Operations.list": "Azure.ResourceManager.Operations.list", + 
"azure.mgmt.discovery.aio.operations.Operations.list": "Azure.ResourceManager.Operations.list", + "azure.mgmt.discovery.operations.BookshelvesOperations.get": "Microsoft.Discovery.Bookshelves.get", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.get": "Microsoft.Discovery.Bookshelves.get", + "azure.mgmt.discovery.operations.BookshelvesOperations.begin_create_or_update": "Microsoft.Discovery.Bookshelves.createOrUpdate", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.begin_create_or_update": "Microsoft.Discovery.Bookshelves.createOrUpdate", + "azure.mgmt.discovery.operations.BookshelvesOperations.begin_update": "Microsoft.Discovery.Bookshelves.update", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.begin_update": "Microsoft.Discovery.Bookshelves.update", + "azure.mgmt.discovery.operations.BookshelvesOperations.begin_delete": "Microsoft.Discovery.Bookshelves.delete", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.begin_delete": "Microsoft.Discovery.Bookshelves.delete", + "azure.mgmt.discovery.operations.BookshelvesOperations.list_by_resource_group": "Microsoft.Discovery.Bookshelves.listByResourceGroup", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.list_by_resource_group": "Microsoft.Discovery.Bookshelves.listByResourceGroup", + "azure.mgmt.discovery.operations.BookshelvesOperations.list_by_subscription": "Microsoft.Discovery.Bookshelves.listBySubscription", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.list_by_subscription": "Microsoft.Discovery.Bookshelves.listBySubscription", + "azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations.get": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.get", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations.get": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.get", + "azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations.begin_create_or_update": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.createOrUpdate", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations.begin_create_or_update": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.createOrUpdate", + "azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations.begin_delete": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.delete", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations.begin_delete": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.delete", + "azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations.list_by_bookshelf": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.listByBookshelf", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations.list_by_bookshelf": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.listByBookshelf", + "azure.mgmt.discovery.operations.BookshelfPrivateLinkResourcesOperations.get": "Microsoft.Discovery.BookshelfPrivateLinkResources.get", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateLinkResourcesOperations.get": "Microsoft.Discovery.BookshelfPrivateLinkResources.get", + "azure.mgmt.discovery.operations.BookshelfPrivateLinkResourcesOperations.list_by_bookshelf": "Microsoft.Discovery.BookshelfPrivateLinkResources.listByBookshelf", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateLinkResourcesOperations.list_by_bookshelf": "Microsoft.Discovery.BookshelfPrivateLinkResources.listByBookshelf", + 
"azure.mgmt.discovery.operations.ToolsOperations.get": "Microsoft.Discovery.Tools.get", + "azure.mgmt.discovery.aio.operations.ToolsOperations.get": "Microsoft.Discovery.Tools.get", + "azure.mgmt.discovery.operations.ToolsOperations.begin_create_or_update": "Microsoft.Discovery.Tools.createOrUpdate", + "azure.mgmt.discovery.aio.operations.ToolsOperations.begin_create_or_update": "Microsoft.Discovery.Tools.createOrUpdate", + "azure.mgmt.discovery.operations.ToolsOperations.begin_update": "Microsoft.Discovery.Tools.update", + "azure.mgmt.discovery.aio.operations.ToolsOperations.begin_update": "Microsoft.Discovery.Tools.update", + "azure.mgmt.discovery.operations.ToolsOperations.begin_delete": "Microsoft.Discovery.Tools.delete", + "azure.mgmt.discovery.aio.operations.ToolsOperations.begin_delete": "Microsoft.Discovery.Tools.delete", + "azure.mgmt.discovery.operations.ToolsOperations.list_by_resource_group": "Microsoft.Discovery.Tools.listByResourceGroup", + "azure.mgmt.discovery.aio.operations.ToolsOperations.list_by_resource_group": "Microsoft.Discovery.Tools.listByResourceGroup", + "azure.mgmt.discovery.operations.ToolsOperations.list_by_subscription": "Microsoft.Discovery.Tools.listBySubscription", + "azure.mgmt.discovery.aio.operations.ToolsOperations.list_by_subscription": "Microsoft.Discovery.Tools.listBySubscription", + "azure.mgmt.discovery.operations.ProjectsOperations.get": "Microsoft.Discovery.Projects.get", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.get": "Microsoft.Discovery.Projects.get", + "azure.mgmt.discovery.operations.ProjectsOperations.begin_create_or_update": "Microsoft.Discovery.Projects.createOrUpdate", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.begin_create_or_update": "Microsoft.Discovery.Projects.createOrUpdate", + "azure.mgmt.discovery.operations.ProjectsOperations.begin_update": "Microsoft.Discovery.Projects.update", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.begin_update": "Microsoft.Discovery.Projects.update", + "azure.mgmt.discovery.operations.ProjectsOperations.begin_delete": "Microsoft.Discovery.Projects.delete", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.begin_delete": "Microsoft.Discovery.Projects.delete", + "azure.mgmt.discovery.operations.ProjectsOperations.list_by_workspace": "Microsoft.Discovery.Projects.listByWorkspace", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.list_by_workspace": "Microsoft.Discovery.Projects.listByWorkspace", + "azure.mgmt.discovery.operations.WorkspacesOperations.get": "Microsoft.Discovery.Workspaces.get", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.get": "Microsoft.Discovery.Workspaces.get", + "azure.mgmt.discovery.operations.WorkspacesOperations.begin_create_or_update": "Microsoft.Discovery.Workspaces.createOrUpdate", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.begin_create_or_update": "Microsoft.Discovery.Workspaces.createOrUpdate", + "azure.mgmt.discovery.operations.WorkspacesOperations.begin_update": "Microsoft.Discovery.Workspaces.update", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.begin_update": "Microsoft.Discovery.Workspaces.update", + "azure.mgmt.discovery.operations.WorkspacesOperations.begin_delete": "Microsoft.Discovery.Workspaces.delete", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.begin_delete": "Microsoft.Discovery.Workspaces.delete", + "azure.mgmt.discovery.operations.WorkspacesOperations.list_by_resource_group": "Microsoft.Discovery.Workspaces.listByResourceGroup", + 
"azure.mgmt.discovery.aio.operations.WorkspacesOperations.list_by_resource_group": "Microsoft.Discovery.Workspaces.listByResourceGroup", + "azure.mgmt.discovery.operations.WorkspacesOperations.list_by_subscription": "Microsoft.Discovery.Workspaces.listBySubscription", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.list_by_subscription": "Microsoft.Discovery.Workspaces.listBySubscription", + "azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations.get": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.get", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations.get": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.get", + "azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations.begin_create_or_update": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.createOrUpdate", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations.begin_create_or_update": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.createOrUpdate", + "azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations.begin_delete": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.delete", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations.begin_delete": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.delete", + "azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations.list_by_workspace": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.listByWorkspace", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations.list_by_workspace": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.listByWorkspace", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.get": "Microsoft.Discovery.ChatModelDeployments.get", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.get": "Microsoft.Discovery.ChatModelDeployments.get", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.begin_create_or_update": "Microsoft.Discovery.ChatModelDeployments.createOrUpdate", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.begin_create_or_update": "Microsoft.Discovery.ChatModelDeployments.createOrUpdate", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.begin_update": "Microsoft.Discovery.ChatModelDeployments.update", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.begin_update": "Microsoft.Discovery.ChatModelDeployments.update", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.begin_delete": "Microsoft.Discovery.ChatModelDeployments.delete", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.begin_delete": "Microsoft.Discovery.ChatModelDeployments.delete", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.list_by_workspace": "Microsoft.Discovery.ChatModelDeployments.listByWorkspace", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.list_by_workspace": "Microsoft.Discovery.ChatModelDeployments.listByWorkspace", + "azure.mgmt.discovery.operations.WorkspacePrivateLinkResourcesOperations.get": "Microsoft.Discovery.WorkspacePrivateLinkResources.get", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateLinkResourcesOperations.get": "Microsoft.Discovery.WorkspacePrivateLinkResources.get", + "azure.mgmt.discovery.operations.WorkspacePrivateLinkResourcesOperations.list_by_workspace": 
"Microsoft.Discovery.WorkspacePrivateLinkResources.listByWorkspace", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateLinkResourcesOperations.list_by_workspace": "Microsoft.Discovery.WorkspacePrivateLinkResources.listByWorkspace", + "azure.mgmt.discovery.operations.NodePoolsOperations.get": "Microsoft.Discovery.NodePools.get", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.get": "Microsoft.Discovery.NodePools.get", + "azure.mgmt.discovery.operations.NodePoolsOperations.begin_create_or_update": "Microsoft.Discovery.NodePools.createOrUpdate", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.begin_create_or_update": "Microsoft.Discovery.NodePools.createOrUpdate", + "azure.mgmt.discovery.operations.NodePoolsOperations.begin_update": "Microsoft.Discovery.NodePools.update", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.begin_update": "Microsoft.Discovery.NodePools.update", + "azure.mgmt.discovery.operations.NodePoolsOperations.begin_delete": "Microsoft.Discovery.NodePools.delete", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.begin_delete": "Microsoft.Discovery.NodePools.delete", + "azure.mgmt.discovery.operations.NodePoolsOperations.list_by_supercomputer": "Microsoft.Discovery.NodePools.listBySupercomputer", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.list_by_supercomputer": "Microsoft.Discovery.NodePools.listBySupercomputer", + "azure.mgmt.discovery.operations.SupercomputersOperations.get": "Microsoft.Discovery.Supercomputers.get", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.get": "Microsoft.Discovery.Supercomputers.get", + "azure.mgmt.discovery.operations.SupercomputersOperations.begin_create_or_update": "Microsoft.Discovery.Supercomputers.createOrUpdate", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.begin_create_or_update": "Microsoft.Discovery.Supercomputers.createOrUpdate", + "azure.mgmt.discovery.operations.SupercomputersOperations.begin_update": "Microsoft.Discovery.Supercomputers.update", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.begin_update": "Microsoft.Discovery.Supercomputers.update", + "azure.mgmt.discovery.operations.SupercomputersOperations.begin_delete": "Microsoft.Discovery.Supercomputers.delete", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.begin_delete": "Microsoft.Discovery.Supercomputers.delete", + "azure.mgmt.discovery.operations.SupercomputersOperations.list_by_resource_group": "Microsoft.Discovery.Supercomputers.listByResourceGroup", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.list_by_resource_group": "Microsoft.Discovery.Supercomputers.listByResourceGroup", + "azure.mgmt.discovery.operations.SupercomputersOperations.list_by_subscription": "Microsoft.Discovery.Supercomputers.listBySubscription", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.list_by_subscription": "Microsoft.Discovery.Supercomputers.listBySubscription", + "azure.mgmt.discovery.operations.StorageAssetsOperations.get": "Microsoft.Discovery.StorageAssets.get", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.get": "Microsoft.Discovery.StorageAssets.get", + "azure.mgmt.discovery.operations.StorageAssetsOperations.begin_create_or_update": "Microsoft.Discovery.StorageAssets.createOrUpdate", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.begin_create_or_update": "Microsoft.Discovery.StorageAssets.createOrUpdate", + "azure.mgmt.discovery.operations.StorageAssetsOperations.begin_update": 
"Microsoft.Discovery.StorageAssets.update", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.begin_update": "Microsoft.Discovery.StorageAssets.update", + "azure.mgmt.discovery.operations.StorageAssetsOperations.begin_delete": "Microsoft.Discovery.StorageAssets.delete", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.begin_delete": "Microsoft.Discovery.StorageAssets.delete", + "azure.mgmt.discovery.operations.StorageAssetsOperations.list_by_storage_container": "Microsoft.Discovery.StorageAssets.listByStorageContainer", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.list_by_storage_container": "Microsoft.Discovery.StorageAssets.listByStorageContainer", + "azure.mgmt.discovery.operations.StorageContainersOperations.get": "Microsoft.Discovery.StorageContainers.get", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.get": "Microsoft.Discovery.StorageContainers.get", + "azure.mgmt.discovery.operations.StorageContainersOperations.begin_create_or_update": "Microsoft.Discovery.StorageContainers.createOrUpdate", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.begin_create_or_update": "Microsoft.Discovery.StorageContainers.createOrUpdate", + "azure.mgmt.discovery.operations.StorageContainersOperations.begin_update": "Microsoft.Discovery.StorageContainers.update", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.begin_update": "Microsoft.Discovery.StorageContainers.update", + "azure.mgmt.discovery.operations.StorageContainersOperations.begin_delete": "Microsoft.Discovery.StorageContainers.delete", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.begin_delete": "Microsoft.Discovery.StorageContainers.delete", + "azure.mgmt.discovery.operations.StorageContainersOperations.list_by_resource_group": "Microsoft.Discovery.StorageContainers.listByResourceGroup", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.list_by_resource_group": "Microsoft.Discovery.StorageContainers.listByResourceGroup", + "azure.mgmt.discovery.operations.StorageContainersOperations.list_by_subscription": "Microsoft.Discovery.StorageContainers.listBySubscription", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.list_by_subscription": "Microsoft.Discovery.StorageContainers.listBySubscription" + } +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/assets.json b/sdk/discovery/azure-mgmt-discovery/assets.json new file mode 100644 index 000000000000..eb5fac668236 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/assets.json @@ -0,0 +1,6 @@ +{ + "AssetsRepo": "Azure/azure-sdk-assets", + "AssetsRepoPrefixPath": "python", + "TagPrefix": "python/discovery/azure-mgmt-discovery", + "Tag": "python/discovery/azure-mgmt-discovery_82e6ebe1f7" +} diff --git a/sdk/discovery/azure-mgmt-discovery/azure/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git 
a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/__init__.py new file mode 100644 index 000000000000..75092cc57e16 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import DiscoveryMgmtClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "DiscoveryMgmtClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_client.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_client.py new file mode 100644 index 000000000000..c2cb92254322 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_client.py @@ -0,0 +1,206 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Optional, TYPE_CHECKING, cast +from typing_extensions import Self + +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.settings import settings +from azure.mgmt.core import ARMPipelineClient +from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy +from azure.mgmt.core.tools import get_arm_endpoints + +from ._configuration import DiscoveryMgmtClientConfiguration +from ._utils.serialization import Deserializer, Serializer +from .operations import ( + BookshelfPrivateEndpointConnectionsOperations, + BookshelfPrivateLinkResourcesOperations, + BookshelvesOperations, + ChatModelDeploymentsOperations, + NodePoolsOperations, + Operations, + ProjectsOperations, + StorageAssetsOperations, + StorageContainersOperations, + SupercomputersOperations, + ToolsOperations, + WorkspacePrivateEndpointConnectionsOperations, + WorkspacePrivateLinkResourcesOperations, + WorkspacesOperations, +) + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials import TokenCredential + + +class DiscoveryMgmtClient: # pylint: disable=too-many-instance-attributes + """Microsoft.Discovery Resource Provider management API. 
+ + :ivar operations: Operations operations + :vartype operations: azure.mgmt.discovery.operations.Operations + :ivar bookshelves: BookshelvesOperations operations + :vartype bookshelves: azure.mgmt.discovery.operations.BookshelvesOperations + :ivar bookshelf_private_endpoint_connections: BookshelfPrivateEndpointConnectionsOperations + operations + :vartype bookshelf_private_endpoint_connections: + azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations + :ivar bookshelf_private_link_resources: BookshelfPrivateLinkResourcesOperations operations + :vartype bookshelf_private_link_resources: + azure.mgmt.discovery.operations.BookshelfPrivateLinkResourcesOperations + :ivar tools: ToolsOperations operations + :vartype tools: azure.mgmt.discovery.operations.ToolsOperations + :ivar projects: ProjectsOperations operations + :vartype projects: azure.mgmt.discovery.operations.ProjectsOperations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: azure.mgmt.discovery.operations.WorkspacesOperations + :ivar workspace_private_endpoint_connections: WorkspacePrivateEndpointConnectionsOperations + operations + :vartype workspace_private_endpoint_connections: + azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations + :ivar chat_model_deployments: ChatModelDeploymentsOperations operations + :vartype chat_model_deployments: azure.mgmt.discovery.operations.ChatModelDeploymentsOperations + :ivar workspace_private_link_resources: WorkspacePrivateLinkResourcesOperations operations + :vartype workspace_private_link_resources: + azure.mgmt.discovery.operations.WorkspacePrivateLinkResourcesOperations + :ivar node_pools: NodePoolsOperations operations + :vartype node_pools: azure.mgmt.discovery.operations.NodePoolsOperations + :ivar supercomputers: SupercomputersOperations operations + :vartype supercomputers: azure.mgmt.discovery.operations.SupercomputersOperations + :ivar storage_assets: StorageAssetsOperations operations + :vartype storage_assets: azure.mgmt.discovery.operations.StorageAssetsOperations + :ivar storage_containers: StorageContainersOperations operations + :vartype storage_containers: azure.mgmt.discovery.operations.StorageContainersOperations + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is None. + :type base_url: str + :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :paramtype cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are + "2026-02-01-preview" and None. Default value is "2026-02-01-preview". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + *, + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + _endpoint = "{endpoint}" + _cloud = cloud_setting or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) + self._config = DiscoveryMgmtClientConfiguration( + credential=credential, + subscription_id=subscription_id, + base_url=cast(str, base_url), + cloud_setting=cloud_setting, + credential_scopes=credential_scopes, + **kwargs + ) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + ARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: ARMPipelineClient = ARMPipelineClient(base_url=cast(str, _endpoint), policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.bookshelves = BookshelvesOperations(self._client, self._config, self._serialize, self._deserialize) + self.bookshelf_private_endpoint_connections = BookshelfPrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.bookshelf_private_link_resources = BookshelfPrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.tools = ToolsOperations(self._client, self._config, self._serialize, self._deserialize) + self.projects = ProjectsOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspace_private_endpoint_connections = WorkspacePrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.chat_model_deployments = ChatModelDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_private_link_resources = WorkspacePrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.node_pools = NodePoolsOperations(self._client, self._config, self._serialize, self._deserialize) + self.supercomputers = SupercomputersOperations(self._client, self._config, self._serialize, self._deserialize) + self.storage_assets = StorageAssetsOperations(self._client, self._config, self._serialize, self._deserialize) + self.storage_containers = StorageContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. 
+ + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_configuration.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_configuration.py new file mode 100644 index 000000000000..26f0ad9e8c9d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_configuration.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials import TokenCredential + + +class DiscoveryMgmtClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for DiscoveryMgmtClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is "https://management.azure.com". + :type base_url: str + :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :type cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are + "2026-02-01-preview" and None. Default value is "2026-02-01-preview". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "2026-02-01-preview") + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.base_url = base_url + self.cloud_setting = cloud_setting + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-discovery/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = ARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_patch.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/model_base.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/model_base.py new file mode 100644 index 000000000000..b4433021b4e5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/model_base.py @@ -0,0 +1,1343 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null +from azure.core.rest import HttpResponse + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") +_NONE_TYPE = type(None) + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds 
in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + "spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj # type: ignore[no-any-return] + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) # type: ignore[no-any-return] + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, "_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, 
rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ + return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... 
+ + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = 
prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. + raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, 
exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. + :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: 
list[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
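+ # Unions are resolved by trying each member's deserializer in turn (see
+ # _deserialize_with_union above); _sorted_annotations pushes the primitive
+ # str/float/int/bool members to the end so the more specific deserializers
+ # get the first attempt. Illustrative sketch (internal helpers only, not
+ # executed here):
+ #
+ #     deserialize = _get_deserialize_callable_from_annotation(
+ #         typing.Union[datetime, str], None
+ #     )
+ #     deserialize("2024-01-01T00:00:00Z")  # parsed into a datetime
+ #     deserialize("not-a-date")            # stays a plain str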
+ if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often them most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() == "dict": + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING.values(): + return deserializer(value.text) if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values(): + return deserializer(value.text) if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value.text if isinstance(value, ET.Element) else value) + except ValueError: + # for unknown value, return raw value + return value.text if isinstance(value, ET.Element) else value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: 
typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as 
deserialized + return deserialized + + return deserialized + + def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. 
+ :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, list[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + 
_get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element # type: ignore[no-any-return] + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: list[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/serialization.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/serialization.py new file mode 100644 index 000000000000..81ec1de5922b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/serialization.py @@ -0,0 +1,2041 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... 
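+ # e.g. this accepts "application/json", "text/json" and structured-suffix
+ # media types such as "application/vnd.api+json", but not "application/xml".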
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... 
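+ # (i.e. with no "content-type" header the payload is treated as JSON, and the
+ # "if body_bytes" check below still returns None for an empty body.)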
+ else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. 
+ + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. 
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. + + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + 
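+ # Illustrative usage of the request-serialization helpers below (a sketch,
+ # not executed here; outputs shown follow from the code as written):
+ #
+ #     serializer = Serializer()
+ #     serializer.query("top", 10, "int")          # -> "10"
+ #     serializer.header("enabled", True, "bool")  # -> "true"
+ #     serializer.url("name", "a b", "str")        # -> "a%20b" (percent-encoded)
+ #
+ # body(), query(), header() and url() all funnel through serialize_data(),
+ # which dispatches on the msrest type string via basic_types / serialize_type.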
def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized |= target_obj.additional_properties + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
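+ # e.g. a child serialized as "{http://example.ns}ChildModel" is re-tagged to
+ # "{http://example.ns}" + xml_name: the namespace half of the tag is kept and
+ # only the local name is replaced with this attribute's XML name.
+ # ("http://example.ns" is just an illustrative namespace.)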
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. 
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+ :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." 
in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(list[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. 
+ + :param dict internal_type: A model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not an itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Found several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. 
+ :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. 
+ + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_version.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py new file mode 100644 index 000000000000..d47f0ad2b019 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import DiscoveryMgmtClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "DiscoveryMgmtClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py new file mode 100644 index 000000000000..535bea73b97a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py @@ -0,0 +1,211 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, Optional, TYPE_CHECKING, cast +from typing_extensions import Self + +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.settings import settings +from azure.mgmt.core import AsyncARMPipelineClient +from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy +from azure.mgmt.core.tools import get_arm_endpoints + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import DiscoveryMgmtClientConfiguration +from .operations import ( + BookshelfPrivateEndpointConnectionsOperations, + BookshelfPrivateLinkResourcesOperations, + BookshelvesOperations, + ChatModelDeploymentsOperations, + NodePoolsOperations, + Operations, + ProjectsOperations, + StorageAssetsOperations, + StorageContainersOperations, + SupercomputersOperations, + ToolsOperations, + WorkspacePrivateEndpointConnectionsOperations, + WorkspacePrivateLinkResourcesOperations, + WorkspacesOperations, +) + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials_async import AsyncTokenCredential + + +class DiscoveryMgmtClient: # pylint: disable=too-many-instance-attributes + """Microsoft.Discovery Resource Provider management API. + + :ivar operations: Operations operations + :vartype operations: azure.mgmt.discovery.aio.operations.Operations + :ivar bookshelves: BookshelvesOperations operations + :vartype bookshelves: azure.mgmt.discovery.aio.operations.BookshelvesOperations + :ivar bookshelf_private_endpoint_connections: BookshelfPrivateEndpointConnectionsOperations + operations + :vartype bookshelf_private_endpoint_connections: + azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations + :ivar bookshelf_private_link_resources: BookshelfPrivateLinkResourcesOperations operations + :vartype bookshelf_private_link_resources: + azure.mgmt.discovery.aio.operations.BookshelfPrivateLinkResourcesOperations + :ivar tools: ToolsOperations operations + :vartype tools: azure.mgmt.discovery.aio.operations.ToolsOperations + :ivar projects: ProjectsOperations operations + :vartype projects: azure.mgmt.discovery.aio.operations.ProjectsOperations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: azure.mgmt.discovery.aio.operations.WorkspacesOperations + :ivar workspace_private_endpoint_connections: WorkspacePrivateEndpointConnectionsOperations + operations + :vartype workspace_private_endpoint_connections: + azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations + :ivar chat_model_deployments: ChatModelDeploymentsOperations operations + :vartype chat_model_deployments: + azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations + :ivar workspace_private_link_resources: WorkspacePrivateLinkResourcesOperations operations + :vartype workspace_private_link_resources: + azure.mgmt.discovery.aio.operations.WorkspacePrivateLinkResourcesOperations + :ivar node_pools: NodePoolsOperations operations + :vartype node_pools: azure.mgmt.discovery.aio.operations.NodePoolsOperations + :ivar supercomputers: SupercomputersOperations operations + :vartype supercomputers: azure.mgmt.discovery.aio.operations.SupercomputersOperations + :ivar storage_assets: StorageAssetsOperations operations + :vartype storage_assets: azure.mgmt.discovery.aio.operations.StorageAssetsOperations + :ivar storage_containers: 
StorageContainersOperations operations + :vartype storage_containers: azure.mgmt.discovery.aio.operations.StorageContainersOperations + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is None. + :type base_url: str + :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :paramtype cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are + "2026-02-01-preview" and None. Default value is "2026-02-01-preview". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + *, + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + _endpoint = "{endpoint}" + _cloud = cloud_setting or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) + self._config = DiscoveryMgmtClientConfiguration( + credential=credential, + subscription_id=subscription_id, + base_url=cast(str, base_url), + cloud_setting=cloud_setting, + credential_scopes=credential_scopes, + **kwargs + ) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + AsyncARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient( + base_url=cast(str, _endpoint), policies=_policies, **kwargs + ) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.bookshelves = BookshelvesOperations(self._client, self._config, self._serialize, self._deserialize) + self.bookshelf_private_endpoint_connections = BookshelfPrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.bookshelf_private_link_resources = BookshelfPrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.tools = ToolsOperations(self._client, self._config, self._serialize, self._deserialize) + self.projects = ProjectsOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspace_private_endpoint_connections = 
WorkspacePrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.chat_model_deployments = ChatModelDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_private_link_resources = WorkspacePrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.node_pools = NodePoolsOperations(self._client, self._config, self._serialize, self._deserialize) + self.supercomputers = SupercomputersOperations(self._client, self._config, self._serialize, self._deserialize) + self.storage_assets = StorageAssetsOperations(self._client, self._config, self._serialize, self._deserialize) + self.storage_containers = StorageContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py new file mode 100644 index 000000000000..174be8f96273 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials_async import AsyncTokenCredential + + +class DiscoveryMgmtClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for DiscoveryMgmtClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is "https://management.azure.com". + :type base_url: str + :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :type cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are + "2026-02-01-preview" and None. Default value is "2026-02-01-preview". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "2026-02-01-preview") + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.base_url = base_url + self.cloud_setting = cloud_setting + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-discovery/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_patch.py 
b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/__init__.py new file mode 100644 index 000000000000..6a22eb1590c8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/__init__.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import Operations # type: ignore +from ._operations import BookshelvesOperations # type: ignore +from ._operations import BookshelfPrivateEndpointConnectionsOperations # type: ignore +from ._operations import BookshelfPrivateLinkResourcesOperations # type: ignore +from ._operations import ToolsOperations # type: ignore +from ._operations import ProjectsOperations # type: ignore +from ._operations import WorkspacesOperations # type: ignore +from ._operations import WorkspacePrivateEndpointConnectionsOperations # type: ignore +from ._operations import ChatModelDeploymentsOperations # type: ignore +from ._operations import WorkspacePrivateLinkResourcesOperations # type: ignore +from ._operations import NodePoolsOperations # type: ignore +from ._operations import SupercomputersOperations # type: ignore +from ._operations import StorageAssetsOperations # type: ignore +from ._operations import StorageContainersOperations # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "Operations", + "BookshelvesOperations", + "BookshelfPrivateEndpointConnectionsOperations", + "BookshelfPrivateLinkResourcesOperations", + "ToolsOperations", + "ProjectsOperations", + "WorkspacesOperations", + "WorkspacePrivateEndpointConnectionsOperations", + "ChatModelDeploymentsOperations", + "WorkspacePrivateLinkResourcesOperations", + "NodePoolsOperations", + "SupercomputersOperations", + "StorageAssetsOperations", + "StorageContainersOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py new file mode 100644 index 000000000000..febeb13e63c7 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py @@ -0,0 +1,9323 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import AsyncPipelineClient +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from ..._utils.serialization import Deserializer, Serializer +from ...operations._operations import ( + build_bookshelf_private_endpoint_connections_create_or_update_request, + build_bookshelf_private_endpoint_connections_delete_request, + build_bookshelf_private_endpoint_connections_get_request, + build_bookshelf_private_endpoint_connections_list_by_bookshelf_request, + build_bookshelf_private_link_resources_get_request, + build_bookshelf_private_link_resources_list_by_bookshelf_request, + build_bookshelves_create_or_update_request, + build_bookshelves_delete_request, + build_bookshelves_get_request, + build_bookshelves_list_by_resource_group_request, + build_bookshelves_list_by_subscription_request, + build_bookshelves_update_request, + build_chat_model_deployments_create_or_update_request, + build_chat_model_deployments_delete_request, + build_chat_model_deployments_get_request, + build_chat_model_deployments_list_by_workspace_request, + build_chat_model_deployments_update_request, + build_node_pools_create_or_update_request, + build_node_pools_delete_request, + build_node_pools_get_request, + build_node_pools_list_by_supercomputer_request, + build_node_pools_update_request, + build_operations_list_request, + build_projects_create_or_update_request, + build_projects_delete_request, + build_projects_get_request, + build_projects_list_by_workspace_request, + build_projects_update_request, + build_storage_assets_create_or_update_request, + build_storage_assets_delete_request, + build_storage_assets_get_request, + build_storage_assets_list_by_storage_container_request, + build_storage_assets_update_request, + build_storage_containers_create_or_update_request, + build_storage_containers_delete_request, + build_storage_containers_get_request, + build_storage_containers_list_by_resource_group_request, + build_storage_containers_list_by_subscription_request, + build_storage_containers_update_request, + build_supercomputers_create_or_update_request, + build_supercomputers_delete_request, + build_supercomputers_get_request, + build_supercomputers_list_by_resource_group_request, + build_supercomputers_list_by_subscription_request, + build_supercomputers_update_request, + build_tools_create_or_update_request, + build_tools_delete_request, + build_tools_get_request, + 
build_tools_list_by_resource_group_request, + build_tools_list_by_subscription_request, + build_tools_update_request, + build_workspace_private_endpoint_connections_create_or_update_request, + build_workspace_private_endpoint_connections_delete_request, + build_workspace_private_endpoint_connections_get_request, + build_workspace_private_endpoint_connections_list_by_workspace_request, + build_workspace_private_link_resources_get_request, + build_workspace_private_link_resources_list_by_workspace_request, + build_workspaces_create_or_update_request, + build_workspaces_delete_request, + build_workspaces_get_request, + build_workspaces_list_by_resource_group_request, + build_workspaces_list_by_subscription_request, + build_workspaces_update_request, +) +from .._configuration import DiscoveryMgmtClientConfiguration + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +JSON = MutableMapping[str, Any] +List = list + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`operations` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Operation"]: + """List the operations for the provider. 
+ + :return: An iterator like instance of Operation + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_operations_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class BookshelvesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`bookshelves` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> _models.Bookshelf: + """Get a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: Bookshelf. The Bookshelf is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Bookshelf + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + + _request = build_bookshelves_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Bookshelf, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + resource: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = 
kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelves_create_or_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: _models.Bookshelf, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Bookshelf + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. 
+ :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Is one of the following types: Bookshelf, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Bookshelf or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Bookshelf. 
The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Bookshelf, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Bookshelf].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Bookshelf]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + properties: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelves_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: _models.Bookshelf, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Bookshelf + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Bookshelf, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Bookshelf or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Bookshelf, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Bookshelf].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Bookshelf]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + 
async def _delete_initial( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_bookshelves_delete_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Delete a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Bookshelf"]: + """List Bookshelf resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of Bookshelf + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Bookshelf]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelves_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Bookshelf], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Bookshelf"]: + """List Bookshelf resources by subscription ID. 
+ + :return: An iterator like instance of Bookshelf + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Bookshelf]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelves_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Bookshelf], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class BookshelfPrivateEndpointConnectionsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`bookshelf_private_endpoint_connections` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.BookshelfPrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: BookshelfPrivateEndpointConnection. The BookshelfPrivateEndpointConnection is + compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BookshelfPrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BookshelfPrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + 
private_endpoint_connection_name: str, + resource: Union[_models.BookshelfPrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelf_private_endpoint_connections_create_or_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: _models.BookshelfPrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BookshelfPrivateEndpointConnection. 
The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.BookshelfPrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Is one of the following types: + BookshelfPrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection or JSON or + IO[bytes] + :return: An instance of AsyncLROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BookshelfPrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BookshelfPrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.BookshelfPrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_bookshelf( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.BookshelfPrivateEndpointConnection"]: + """Lists all private endpoint connections for a bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :return: An iterator like instance of BookshelfPrivateEndpointConnection + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BookshelfPrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelf_private_endpoint_connections_list_by_bookshelf_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BookshelfPrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class BookshelfPrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`bookshelf_private_link_resources` attribute. 
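+
+     .. admonition:: Illustrative usage (editor's sketch, not part of the generated client)
+
+         A minimal, hedged example of how these operations might be driven through the async
+         client; the resource group and bookshelf names below are hypothetical placeholders::
+
+             import asyncio
+
+             from azure.identity.aio import DefaultAzureCredential
+             from azure.mgmt.discovery.aio import DiscoveryMgmtClient
+
+             async def main() -> None:
+                 # The credential and the client are async context managers, so close them cleanly.
+                 async with DefaultAzureCredential() as credential:
+                     async with DiscoveryMgmtClient(
+                         credential=credential, subscription_id="<subscription-id>"
+                     ) as client:
+                         # list_by_bookshelf returns an AsyncItemPaged, so iterate with async for.
+                         async for resource in client.bookshelf_private_link_resources.list_by_bookshelf(
+                             "my-resource-group", "my-bookshelf"
+                         ):
+                             print(resource)
+
+             asyncio.run(main())
+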
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, bookshelf_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.BookshelfPrivateLinkResource: + """Gets the specified private link resource for the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_link_resource_name: The name of the private link associated with the Azure + resource. Required. + :type private_link_resource_name: str + :return: BookshelfPrivateLinkResource. The BookshelfPrivateLinkResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.discovery.models.BookshelfPrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BookshelfPrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_link_resources_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BookshelfPrivateLinkResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_by_bookshelf( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.BookshelfPrivateLinkResource"]: + """Lists all private link resources for the 
bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: An iterator like instance of BookshelfPrivateLinkResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.BookshelfPrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BookshelfPrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelf_private_link_resources_list_by_bookshelf_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BookshelfPrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class ToolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`tools` attribute. 
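+
+     .. admonition:: Illustrative usage (editor's sketch, not part of the generated client)
+
+         A minimal, hedged example of calling the Tool operations from the async client; the
+         resource group and tool names are hypothetical placeholders::
+
+             import asyncio
+
+             from azure.identity.aio import DefaultAzureCredential
+             from azure.mgmt.discovery.aio import DiscoveryMgmtClient
+
+             async def main() -> None:
+                 async with DefaultAzureCredential() as credential:
+                     async with DiscoveryMgmtClient(
+                         credential=credential, subscription_id="<subscription-id>"
+                     ) as client:
+                         # Point read of a single Tool resource.
+                         tool = await client.tools.get("my-resource-group", "my-tool")
+                         print(tool)
+                         # Enumerate Tools in the resource group via the async pager.
+                         async for item in client.tools.list_by_resource_group("my-resource-group"):
+                             print(item)
+
+             asyncio.run(main())
+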
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> _models.Tool: + """Get a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :return: Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Tool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + + _request = build_tools_get_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Tool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, resource_group_name: str, tool_name: str, resource: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content 
= None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_tools_create_or_update_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: _models.Tool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Tool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, tool_name: str, resource: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Is one of the following types: Tool, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Tool or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Tool. 
The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Tool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Tool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Tool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, resource_group_name: str, tool_name: str, properties: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_tools_update_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: _models.Tool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Tool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. 
+ :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, resource_group_name: str, tool_name: str, properties: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Is one of the following types: Tool, + JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Tool or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Tool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Tool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Tool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = 
kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_tools_delete_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Delete a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. 
+ :type tool_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Tool"]: + """List Tool resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of Tool + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Tool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_tools_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Tool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Tool"]: + """List Tool resources by subscription ID. 
+ + :return: An iterator like instance of Tool + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Tool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_tools_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Tool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class ProjectsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`projects` attribute. 
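+
+     .. admonition:: Illustrative usage (editor's sketch, not part of the generated client)
+
+         A minimal, hedged example of creating a Project through the async client; the resource
+         group, workspace and project names, and the JSON request body, are hypothetical
+         placeholders rather than a documented payload::
+
+             import asyncio
+
+             from azure.identity.aio import DefaultAzureCredential
+             from azure.mgmt.discovery.aio import DiscoveryMgmtClient
+
+             async def main() -> None:
+                 async with DefaultAzureCredential() as credential:
+                     async with DiscoveryMgmtClient(
+                         credential=credential, subscription_id="<subscription-id>"
+                     ) as client:
+                         # begin_create_or_update accepts a Project model, a JSON dict, or IO[bytes];
+                         # a plain dict is used here for brevity.
+                         poller = await client.projects.begin_create_or_update(
+                             "my-resource-group",
+                             "my-workspace",
+                             "my-project",
+                             {"location": "eastus"},
+                         )
+                         project = await poller.result()
+                         print(project)
+
+             asyncio.run(main())
+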
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any + ) -> _models.Project: + """Get a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :return: Project. The Project is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Project + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + + _request = build_projects_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Project, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) 
or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_projects_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: _models.Project, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Project + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Project]: + """Create a Project. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Is one of the following types: Project, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Project or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Project, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Project].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Project]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_projects_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: _models.Project, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Project + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Project, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Project or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Project, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Project].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Project]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_projects_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except 
(StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.Project"]: + """List Project resources by Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of Project + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Project]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_projects_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Project], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`workspaces` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: + """Get a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: Workspace. The Workspace is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Workspace + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + + _request = build_workspaces_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Workspace, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + resource: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = 
kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspaces_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. 
+ :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Is one of the following types: Workspace, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Workspace or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Workspace. 
The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Workspace, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Workspace].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + properties: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspaces_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) 
+ + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Workspace, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Workspace or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Workspace, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Workspace].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, 
resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_workspaces_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Delete a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Workspace"]: + """List Workspace resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of Workspace + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Workspace]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspaces_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Workspace], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Workspace"]: + """List Workspace resources by subscription ID. 
+ + :return: An iterator like instance of Workspace + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Workspace]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspaces_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Workspace], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class WorkspacePrivateEndpointConnectionsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`workspace_private_endpoint_connections` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.WorkspacePrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: WorkspacePrivateEndpointConnection. The WorkspacePrivateEndpointConnection is + compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.WorkspacePrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_workspace_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.WorkspacePrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + 
private_endpoint_connection_name: str, + resource: Union[_models.WorkspacePrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspace_private_endpoint_connections_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: _models.WorkspacePrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns WorkspacePrivateEndpointConnection. 
The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.WorkspacePrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Is one of the following types: + WorkspacePrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection or JSON or + IO[bytes] + :return: An instance of AsyncLROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.WorkspacePrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.WorkspacePrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.WorkspacePrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_workspace_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.WorkspacePrivateEndpointConnection"]: + """Lists all private endpoint connections for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of WorkspacePrivateEndpointConnection + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.WorkspacePrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspace_private_endpoint_connections_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.WorkspacePrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class ChatModelDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`chat_model_deployments` attribute. 
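+
+    A minimal usage sketch (hypothetical resource names; assumes an already
+    authenticated :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient` named ``client``):
+
+    .. code-block:: python
+
+        # "my-rg", "my-workspace" and "my-deployment" below are placeholder names.
+        async for deployment in client.chat_model_deployments.list_by_workspace(
+            resource_group_name="my-rg", workspace_name="my-workspace"
+        ):
+            print(deployment)
+
+        poller = await client.chat_model_deployments.begin_delete(
+            resource_group_name="my-rg",
+            workspace_name="my-workspace",
+            chat_model_deployment_name="my-deployment",
+        )
+        await poller.result()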
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> _models.ChatModelDeployment: + """Get a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :return: ChatModelDeployment. The ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.ChatModelDeployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + + _request = build_chat_model_deployments_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: 
ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_chat_model_deployments_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: _models.ChatModelDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.ChatModelDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. 
The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Is one of the following types: + ChatModelDeployment, JSON, IO[bytes] Required. 
+ :type resource: ~azure.mgmt.discovery.models.ChatModelDeployment or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.ChatModelDeployment].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.ChatModelDeployment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_chat_model_deployments_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + 
subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: _models.ChatModelDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.ChatModelDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. 
+ :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Is one of the following types: + ChatModelDeployment, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.ChatModelDeployment or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. 
The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.ChatModelDeployment].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.ChatModelDeployment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_chat_model_deployments_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.ChatModelDeployment"]: + """List ChatModelDeployment resources by Workspace. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An iterator like instance of ChatModelDeployment + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.ChatModelDeployment]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_chat_model_deployments_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.ChatModelDeployment], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class WorkspacePrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`workspace_private_link_resources` attribute. 
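+
+    A minimal usage sketch (hypothetical resource names; assumes an already
+    authenticated :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient` named ``client``):
+
+    .. code-block:: python
+
+        # Enumerate the private link resources exposed by a workspace;
+        # "my-rg" and "my-workspace" are placeholder names.
+        async for link_resource in client.workspace_private_link_resources.list_by_workspace(
+            resource_group_name="my-rg", workspace_name="my-workspace"
+        ):
+            print(link_resource)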
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.WorkspacePrivateLinkResource: + """Gets the specified private link resource for the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_link_resource_name: The name of the private link associated with the Azure + resource. Required. + :type private_link_resource_name: str + :return: WorkspacePrivateLinkResource. The WorkspacePrivateLinkResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.discovery.models.WorkspacePrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.WorkspacePrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_workspace_private_link_resources_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.WorkspacePrivateLinkResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.WorkspacePrivateLinkResource"]: + """Lists all private link resources for the 
workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An iterator like instance of WorkspacePrivateLinkResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.WorkspacePrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.WorkspacePrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspace_private_link_resources_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.WorkspacePrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class NodePoolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`node_pools` attribute. 
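+
+    A minimal usage sketch (hypothetical resource names; assumes an already
+    authenticated :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient` named ``client``):
+
+    .. code-block:: python
+
+        # Fetch a single node pool from a supercomputer; all names below are placeholders.
+        node_pool = await client.node_pools.get(
+            resource_group_name="my-rg",
+            supercomputer_name="my-supercomputer",
+            node_pool_name="my-node-pool",
+        )
+        print(node_pool)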
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> _models.NodePool: + """Get a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :return: NodePool. The NodePool is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.NodePool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + + _request = build_node_pools_get_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.NodePool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_node_pools_create_or_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: _models.NodePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.NodePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Is one of the following types: NodePool, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.NodePool or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.NodePool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.NodePool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.NodePool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_node_pools_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", 
skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: _models.NodePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.NodePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Is one of the following types: + NodePool, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.NodePool or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.NodePool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.NodePool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.NodePool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_node_pools_delete_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close 
the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_supercomputer( + self, resource_group_name: str, supercomputer_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.NodePool"]: + """List NodePool resources by Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. 
+ :type supercomputer_name: str + :return: An iterator like instance of NodePool + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.NodePool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_node_pools_list_by_supercomputer_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.NodePool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class SupercomputersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`supercomputers` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, supercomputer_name: str, **kwargs: Any) -> _models.Supercomputer: + """Get a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :return: Supercomputer. The Supercomputer is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Supercomputer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + + _request = build_supercomputers_get_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Supercomputer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + resource: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", 
_headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_supercomputers_create_or_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: _models.Supercomputer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Supercomputer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Is one of the following types: Supercomputer, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Supercomputer or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Supercomputer. 
The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Supercomputer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Supercomputer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Supercomputer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + properties: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_supercomputers_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: _models.Supercomputer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Supercomputer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. 
The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Supercomputer, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Supercomputer or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Supercomputer. 
The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Supercomputer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Supercomputer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Supercomputer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, supercomputer_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_supercomputers_delete_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, supercomputer_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.Supercomputer"]: + """List Supercomputer resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of Supercomputer + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Supercomputer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_supercomputers_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Supercomputer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Supercomputer"]: + """List Supercomputer resources by subscription ID. 
+ + :return: An iterator like instance of Supercomputer + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Supercomputer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_supercomputers_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Supercomputer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class StorageAssetsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`storage_assets` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> _models.StorageAsset: + """Get a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :return: StorageAsset. The StorageAsset is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.StorageAsset + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + + _request = build_storage_assets_get_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAsset, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + 
error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_assets_create_or_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: _models.StorageAsset, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.StorageAsset + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. 
The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Is one of the following types: StorageAsset, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.StorageAsset or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns StorageAsset. 
The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAsset, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.StorageAsset].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.StorageAsset]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_assets_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: _models.StorageAsset, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.StorageAsset + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. 
The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Is one of the following types: + StorageAsset, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.StorageAsset or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns StorageAsset. 
The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAsset, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.StorageAsset].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.StorageAsset]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_assets_delete_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: 
+ try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_storage_container( + self, resource_group_name: str, storage_container_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.StorageAsset"]: + """List StorageAsset resources by StorageContainer. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :return: An iterator like instance of StorageAsset + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageAsset]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_assets_list_by_storage_container_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageAsset], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class StorageContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s + :attr:`storage_containers` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, storage_container_name: str, **kwargs: Any + ) -> _models.StorageContainer: + """Get a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :return: StorageContainer. The StorageContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.StorageContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + + _request = build_storage_containers_get_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + storage_container_name: str, + resource: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + 
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_containers_create_or_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: _models.StorageContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.StorageContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageContainer]: + """Create a StorageContainer. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Is one of the following types: StorageContainer, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.StorageContainer or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageContainer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.StorageContainer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.StorageContainer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + storage_container_name: str, + properties: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_containers_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", 
skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: _models.StorageContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.StorageContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Is one of the following types: + StorageContainer, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.StorageContainer or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageContainer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.StorageContainer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.StorageContainer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, storage_container_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_containers_delete_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + 
except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, storage_container_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.StorageContainer"]: + """List StorageContainer resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of StorageContainer + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageContainer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_containers_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageContainer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.StorageContainer"]: + """List StorageContainer resources by subscription ID. 
+ + :return: An iterator like instance of StorageContainer + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageContainer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_containers_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageContainer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_patch.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/__init__.py new file mode 100644 index 000000000000..5b2204e6df45 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/__init__.py @@ -0,0 +1,148 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + AzureNetAppFilesStore, + AzureStorageBlobStore, + Bookshelf, + BookshelfKeyVaultProperties, + BookshelfPrivateEndpointConnection, + BookshelfPrivateLinkResource, + BookshelfProperties, + ChatModelDeployment, + ChatModelDeploymentProperties, + ErrorAdditionalInfo, + ErrorDetail, + ErrorResponse, + Identity, + KeyVaultProperties, + MoboBrokerResource, + NodePool, + NodePoolProperties, + Operation, + OperationDisplay, + PrivateEndpoint, + PrivateEndpointConnection, + PrivateEndpointConnectionProperties, + PrivateLinkResourceProperties, + PrivateLinkServiceConnectionState, + Project, + ProjectProperties, + ProjectSettings, + ProxyResource, + Resource, + StorageAsset, + StorageAssetProperties, + StorageContainer, + StorageContainerProperties, + StorageStore, + Supercomputer, + SupercomputerIdentities, + SupercomputerProperties, + SystemData, + Tool, + ToolProperties, + TrackedResource, + UserAssignedIdentity, + WithMoboBrokerResources, + Workspace, + WorkspacePrivateEndpointConnection, + WorkspacePrivateLinkResource, + WorkspaceProperties, +) + +from ._enums import ( # type: ignore + ActionType, + CreatedByType, + CustomerManagedKeys, + NetworkEgressType, + Origin, + PrivateEndpointConnectionProvisioningState, + PrivateEndpointServiceConnectionStatus, + ProvisioningState, + PublicNetworkAccess, + ScaleSetPriority, + StorageStoreType, + SystemSku, + VmSize, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureNetAppFilesStore", + "AzureStorageBlobStore", + "Bookshelf", + "BookshelfKeyVaultProperties", + "BookshelfPrivateEndpointConnection", + "BookshelfPrivateLinkResource", + "BookshelfProperties", + "ChatModelDeployment", + "ChatModelDeploymentProperties", + "ErrorAdditionalInfo", + "ErrorDetail", + "ErrorResponse", + "Identity", + "KeyVaultProperties", + "MoboBrokerResource", + "NodePool", + "NodePoolProperties", + "Operation", + "OperationDisplay", + "PrivateEndpoint", + "PrivateEndpointConnection", + "PrivateEndpointConnectionProperties", + 
"PrivateLinkResourceProperties", + "PrivateLinkServiceConnectionState", + "Project", + "ProjectProperties", + "ProjectSettings", + "ProxyResource", + "Resource", + "StorageAsset", + "StorageAssetProperties", + "StorageContainer", + "StorageContainerProperties", + "StorageStore", + "Supercomputer", + "SupercomputerIdentities", + "SupercomputerProperties", + "SystemData", + "Tool", + "ToolProperties", + "TrackedResource", + "UserAssignedIdentity", + "WithMoboBrokerResources", + "Workspace", + "WorkspacePrivateEndpointConnection", + "WorkspacePrivateLinkResource", + "WorkspaceProperties", + "ActionType", + "CreatedByType", + "CustomerManagedKeys", + "NetworkEgressType", + "Origin", + "PrivateEndpointConnectionProvisioningState", + "PrivateEndpointServiceConnectionStatus", + "ProvisioningState", + "PublicNetworkAccess", + "ScaleSetPriority", + "StorageStoreType", + "SystemSku", + "VmSize", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_enums.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_enums.py new file mode 100644 index 000000000000..478933ec5c1e --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_enums.py @@ -0,0 +1,179 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class ActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Extensible enum. Indicates the action type. "Internal" refers to actions that are for internal + only APIs. + """ + + INTERNAL = "Internal" + """Actions are for internal-only APIs.""" + + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The kind of entity that created the resource.""" + + USER = "User" + """The entity was created by a user.""" + APPLICATION = "Application" + """The entity was created by an application.""" + MANAGED_IDENTITY = "ManagedIdentity" + """The entity was created by a managed identity.""" + KEY = "Key" + """The entity was created by a key.""" + + +class CustomerManagedKeys(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of customer managed key usage.""" + + ENABLED = "Enabled" + """Customer managed keys are enabled.""" + DISABLED = "Disabled" + """Customer managed keys are disabled.""" + + +class NetworkEgressType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Supported network egress types.""" + + LOAD_BALANCER = "LoadBalancer" + """Public outbound network via load balancer (Default).""" + NONE = "None" + """No default outbound.""" + + +class Origin(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit + logs UX. Default value is "user,system". 
+ """
+
+ USER = "user"
+ """Indicates the operation is initiated by a user."""
+ SYSTEM = "system"
+ """Indicates the operation is initiated by a system."""
+ USER_SYSTEM = "user,system"
+ """Indicates the operation is initiated by a user or system."""
+
+
+class PrivateEndpointConnectionProvisioningState( # pylint: disable=name-too-long
+ str, Enum, metaclass=CaseInsensitiveEnumMeta
+):
+ """The current provisioning state."""
+
+ SUCCEEDED = "Succeeded"
+ """Connection has been provisioned."""
+ CREATING = "Creating"
+ """Connection is being created."""
+ DELETING = "Deleting"
+ """Connection is being deleted."""
+ FAILED = "Failed"
+ """Connection provisioning has failed."""
+
+
+class PrivateEndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The private endpoint connection status."""
+
+ PENDING = "Pending"
+ """Connection waiting for approval or rejection."""
+ APPROVED = "Approved"
+ """Connection approved."""
+ REJECTED = "Rejected"
+ """Connection rejected."""
+
+
+class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The resource provisioning state."""
+
+ SUCCEEDED = "Succeeded"
+ """Resource has been created."""
+ FAILED = "Failed"
+ """Resource creation failed."""
+ CANCELED = "Canceled"
+ """Resource creation was canceled."""
+ ACCEPTED = "Accepted"
+ """The resource create request has been accepted."""
+ PROVISIONING = "Provisioning"
+ """The resource is being provisioned."""
+ UPDATING = "Updating"
+ """The resource is updating."""
+ DELETING = "Deleting"
+ """The resource is being deleted."""
+
+
+class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """State of public network access."""
+
+ ENABLED = "Enabled"
+ """Public network access is enabled."""
+ DISABLED = "Disabled"
+ """Public network access is disabled."""
+
+
+class ScaleSetPriority(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Supported Virtual Machine Scale Set priorities."""
+
+ REGULAR = "Regular"
+ """Regular priority Virtual Machine Scale Set."""
+ SPOT = "Spot"
+ """Spot priority Virtual Machine Scale Set."""
+
+
+class StorageStoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The kind of the backing storage store."""
+
+ AZURE_STORAGE_BLOB = "AzureStorageBlob"
+ """The Azure storage blob kind."""
+ AZURE_NET_APP_FILES = "AzureNetAppFiles"
+ """The Azure NetApp Files kind."""
+
+
+class SystemSku(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Supported System SKU Sizes."""
+
+ STANDARD_D4_S_V6 = "Standard_D4s_v6"
+ """Standard_D4s_v6 basic compute VM (default)."""
+ STANDARD_D4_S_V5 = "Standard_D4s_v5"
+ """Standard_D4s_v5 SKU."""
+ STANDARD_D4_S_V4 = "Standard_D4s_v4"
+ """Standard_D4s_v4 SKU."""
+
+
+class VmSize(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Supported Azure VM Sizes."""
+
+ STANDARD_NC24_ADS_A100_V4 = "Standard_NC24ads_A100_v4"
+ """Standard_NC24ads_A100_v4 GPU-optimized Azure VM Size."""
+ STANDARD_NC48_ADS_A100_V4 = "Standard_NC48ads_A100_v4"
+ """Standard_NC48ads_A100_v4 GPU-optimized Azure VM Size."""
+ STANDARD_NC96_ADS_A100_V4 = "Standard_NC96ads_A100_v4"
+ """Standard_NC96ads_A100_v4 GPU-optimized Azure VM Size."""
+ STANDARD_NC4_AS_T4_V3 = "Standard_NC4as_T4_v3"
+ """Standard_NC4as_T4_v3 GPU-optimized Azure VM Size."""
+ STANDARD_NC8_AS_T4_V3 = "Standard_NC8as_T4_v3"
+ """Standard_NC8as_T4_v3 GPU-optimized Azure VM Size."""
+ STANDARD_NC16_AS_T4_V3 = "Standard_NC16as_T4_v3"
+ """Standard_NC16as_T4_v3 GPU-optimized Azure VM Size."""
+ STANDARD_NC64_AS_T4_V3 = "Standard_NC64as_T4_v3"
+ """Standard_NC64as_T4_v3 GPU-optimized Azure VM Size."""
+ STANDARD_NV6_ADS_A10_V5 = "Standard_NV6ads_A10_v5"
+ """Standard_NV6ads_A10_v5 GPU-optimized Azure VM Size."""
+ STANDARD_NV12_ADS_A10_V5 = "Standard_NV12ads_A10_v5"
+ """Standard_NV12ads_A10_v5 GPU-optimized Azure VM Size."""
+ STANDARD_NV24_ADS_A10_V5 = "Standard_NV24ads_A10_v5"
+ """Standard_NV24ads_A10_v5 GPU-optimized Azure VM Size."""
+ STANDARD_NV36_ADS_A10_V5 = "Standard_NV36ads_A10_v5"
+ """Standard_NV36ads_A10_v5 GPU-optimized Azure VM Size."""
+ STANDARD_NV36_ADMS_A10_V5 = "Standard_NV36adms_A10_v5"
+ """Standard_NV36adms_A10_v5 GPU-optimized Azure VM Size."""
+ STANDARD_NV72_ADS_A10_V5 = "Standard_NV72ads_A10_v5"
+ """Standard_NV72ads_A10_v5 GPU-optimized Azure VM Size."""
+ STANDARD_ND40_RS_V2 = "Standard_ND40rs_v2"
+ """Standard_ND40rs_v2 GPU-optimized Azure VM Size."""
diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_models.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_models.py
new file mode 100644
index 000000000000..79d04a2b71d2
--- /dev/null
+++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_models.py
@@ -0,0 +1,2095 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=useless-super-delegation
+
+import datetime
+from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload
+
+from .._utils.model_base import Model as _Model, rest_discriminator, rest_field
+from ._enums import StorageStoreType
+
+if TYPE_CHECKING:
+ from .. import models as _models
+
+
+class StorageStore(_Model):
+ """An abstract representation of storage store kind.
+
+ You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+ AzureNetAppFilesStore, AzureStorageBlobStore
+
+ :ivar kind: The storage store kind. Required. Known values are: "AzureStorageBlob" and
+ "AzureNetAppFiles".
+ :vartype kind: str or ~azure.mgmt.discovery.models.StorageStoreType
+ """
+
+ __mapping__: dict[str, _Model] = {}
+ kind: str = rest_discriminator(name="kind", visibility=["read", "create"])
+ """The storage store kind. Required. Known values are: \"AzureStorageBlob\" and
+ \"AzureNetAppFiles\"."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ kind: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class AzureNetAppFilesStore(StorageStore, discriminator="AzureNetAppFiles"):
+ """The Azure NetApp Files properties.
+
+ :ivar kind: Azure NetApp Files. Required. The Azure NetApp Files kind.
+ :vartype kind: str or ~azure.mgmt.discovery.models.AZURE_NET_APP_FILES
+ :ivar net_app_volume_id: The associated Azure NetApp Files volume ID. Required.
+ :vartype net_app_volume_id: str + """ + + kind: Literal[StorageStoreType.AZURE_NET_APP_FILES] = rest_discriminator(name="kind", visibility=["read", "create"]) # type: ignore + """Azure NetApp Files. Required. The Azure NetApp Files kind.""" + net_app_volume_id: str = rest_field(name="netAppVolumeId", visibility=["read", "create"]) + """The associated Azure NetApp Files volume ID. Required.""" + + @overload + def __init__( + self, + *, + net_app_volume_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = StorageStoreType.AZURE_NET_APP_FILES # type: ignore + + +class AzureStorageBlobStore(StorageStore, discriminator="AzureStorageBlob"): + """The Azure storage blob properties. + + :ivar kind: Azure Storage Blob. Required. The Azure storage blob kind. + :vartype kind: str or ~azure.mgmt.discovery.models.AZURE_STORAGE_BLOB + :ivar storage_account_id: The associated Azure Storage Account ID. Required. + :vartype storage_account_id: str + """ + + kind: Literal[StorageStoreType.AZURE_STORAGE_BLOB] = rest_discriminator(name="kind", visibility=["read", "create"]) # type: ignore + """Azure Storage Blob. Required. The Azure storage blob kind.""" + storage_account_id: str = rest_field(name="storageAccountId", visibility=["read", "create"]) + """The associated Azure Storage Account ID. Required.""" + + @overload + def __init__( + self, + *, + storage_account_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = StorageStoreType.AZURE_STORAGE_BLOB # type: ignore + + +class Resource(_Model): + """Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.""" + name: Optional[str] = rest_field(visibility=["read"]) + """The name of the resource.""" + type: Optional[str] = rest_field(visibility=["read"]) + """The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or + \"Microsoft.Storage/storageAccounts\".""" + system_data: Optional["_models.SystemData"] = rest_field(name="systemData", visibility=["read"]) + """Azure Resource Manager metadata containing createdBy and modifiedBy information.""" + + +class TrackedResource(Resource): + """Tracked Resource. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + """ + + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource tags.""" + location: str = rest_field(visibility=["read", "create"]) + """The geo-location where the resource lives. Required.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Bookshelf(TrackedResource): + """Bookshelf tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.BookshelfProperties + """ + + properties: Optional["_models.BookshelfProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.BookshelfProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BookshelfKeyVaultProperties(_Model): + """Key Vault Properties with clientId selection. + + :ivar key_vault_uri: The Key Vault URI. Required. + :vartype key_vault_uri: str + :ivar key_name: The Key Name in Key Vault. Required. + :vartype key_name: str + :ivar key_version: The Key Version in Key Vault. + :vartype key_version: str + :ivar identity_client_id: The client ID of the identity to use for accessing the Key Vault. + Must be a workload identity assigned to the Bookshelf resource. Required. 
+ :vartype identity_client_id: str + """ + + key_vault_uri: str = rest_field(name="keyVaultUri", visibility=["read", "create"]) + """The Key Vault URI. Required.""" + key_name: str = rest_field(name="keyName", visibility=["read", "create", "update"]) + """The Key Name in Key Vault. Required.""" + key_version: Optional[str] = rest_field(name="keyVersion", visibility=["read", "create", "update"]) + """The Key Version in Key Vault.""" + identity_client_id: str = rest_field(name="identityClientId", visibility=["read", "create"]) + """The client ID of the identity to use for accessing the Key Vault. Must be a workload identity + assigned to the Bookshelf resource. Required.""" + + @overload + def __init__( + self, + *, + key_vault_uri: str, + key_name: str, + identity_client_id: str, + key_version: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProxyResource(Resource): + """Proxy Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + """ + + +class BookshelfPrivateEndpointConnection(ProxyResource): + """The Private Endpoint Connection resource for Bookshelf. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.PrivateEndpointConnectionProperties + """ + + properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateEndpointConnectionProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BookshelfPrivateLinkResource(ProxyResource): + """A private link resource for Bookshelf. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.PrivateLinkResourceProperties + """ + + properties: Optional["_models.PrivateLinkResourceProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateLinkResourceProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BookshelfProperties(_Model): + """Bookshelf properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar workload_identities: User assigned identity IDs to be used by knowledgebase workloads. + The key value must be the resource ID of the identity resource. + :vartype workload_identities: dict[str, ~azure.mgmt.discovery.models.UserAssignedIdentity] + :ivar customer_managed_keys: Whether or not to use a customer managed key when encrypting data + at rest. Known values are: "Enabled" and "Disabled". + :vartype customer_managed_keys: str or ~azure.mgmt.discovery.models.CustomerManagedKeys + :ivar key_vault_properties: The key to use for encrypting data at rest when customer managed + keys are enabled. Required if Customer Managed Keys is enabled. + :vartype key_vault_properties: ~azure.mgmt.discovery.models.BookshelfKeyVaultProperties + :ivar log_analytics_cluster_id: The Log Analytics Cluster to use for debug logs. This is + required when Customer Managed Keys are enabled. + :vartype log_analytics_cluster_id: str + :ivar private_endpoint_connections: List of private endpoint connections. + :vartype private_endpoint_connections: + list[~azure.mgmt.discovery.models.PrivateEndpointConnection] + :ivar public_network_access: Whether or not public network access is allowed for this resource. + For security reasons, it is recommended to disable it whenever possible. Known values are: + "Enabled" and "Disabled". + :vartype public_network_access: str or ~azure.mgmt.discovery.models.PublicNetworkAccess + :ivar private_endpoint_subnet_id: Private Endpoint Subnet ID for private endpoint connections. + :vartype private_endpoint_subnet_id: str + :ivar search_subnet_id: Search Subnet ID for search resources. + :vartype search_subnet_id: str + :ivar managed_resource_group: The resource group for resources managed on behalf of customer. + :vartype managed_resource_group: str + :ivar managed_on_behalf_of_configuration: Managed-On-Behalf-Of configuration properties. 
This + configuration exists for the resources where a resource provider manages those resources on + behalf of the resource owner. + :vartype managed_on_behalf_of_configuration: + ~azure.mgmt.discovery.models.WithMoboBrokerResources + :ivar bookshelf_uri: The bookshelf data plane API URI. + :vartype bookshelf_uri: str + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + workload_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = rest_field( + name="workloadIdentities", visibility=["read", "create"] + ) + """User assigned identity IDs to be used by knowledgebase workloads. The key value must be the + resource ID of the identity resource.""" + customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = rest_field( + name="customerManagedKeys", visibility=["read", "create"] + ) + """Whether or not to use a customer managed key when encrypting data at rest. Known values are: + \"Enabled\" and \"Disabled\".""" + key_vault_properties: Optional["_models.BookshelfKeyVaultProperties"] = rest_field( + name="keyVaultProperties", visibility=["read", "create", "update"] + ) + """The key to use for encrypting data at rest when customer managed keys are enabled. Required if + Customer Managed Keys is enabled.""" + log_analytics_cluster_id: Optional[str] = rest_field(name="logAnalyticsClusterId", visibility=["read", "create"]) + """The Log Analytics Cluster to use for debug logs. This is required when Customer Managed Keys + are enabled.""" + private_endpoint_connections: Optional[list["_models.PrivateEndpointConnection"]] = rest_field( + name="privateEndpointConnections", visibility=["read"] + ) + """List of private endpoint connections.""" + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field( + name="publicNetworkAccess", visibility=["read", "create", "update"] + ) + """Whether or not public network access is allowed for this resource. For security reasons, it is + recommended to disable it whenever possible. Known values are: \"Enabled\" and \"Disabled\".""" + private_endpoint_subnet_id: Optional[str] = rest_field( + name="privateEndpointSubnetId", visibility=["read", "create"] + ) + """Private Endpoint Subnet ID for private endpoint connections.""" + search_subnet_id: Optional[str] = rest_field(name="searchSubnetId", visibility=["read", "create"]) + """Search Subnet ID for search resources.""" + managed_resource_group: Optional[str] = rest_field(name="managedResourceGroup", visibility=["read"]) + """The resource group for resources managed on behalf of customer.""" + managed_on_behalf_of_configuration: Optional["_models.WithMoboBrokerResources"] = rest_field( + name="managedOnBehalfOfConfiguration", visibility=["read"] + ) + """Managed-On-Behalf-Of configuration properties. 
This configuration exists for the resources + where a resource provider manages those resources on behalf of the resource owner.""" + bookshelf_uri: Optional[str] = rest_field(name="bookshelfUri", visibility=["read"]) + """The bookshelf data plane API URI.""" + + @overload + def __init__( + self, + *, + workload_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = None, + customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = None, + key_vault_properties: Optional["_models.BookshelfKeyVaultProperties"] = None, + log_analytics_cluster_id: Optional[str] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + private_endpoint_subnet_id: Optional[str] = None, + search_subnet_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ChatModelDeployment(TrackedResource): + """Represents a deployment that ties a specific model family to a user defined deployment name + used when invoking the chat model. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.ChatModelDeploymentProperties + """ + + properties: Optional["_models.ChatModelDeploymentProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.ChatModelDeploymentProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ChatModelDeploymentProperties(_Model): + """Defines a deployment binding a specific model family to a user-defined deployment name for chat + inference. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar model_format: Model format as published by the provider. Verify supported formats per + region using the Model Catalog API. Required. + :vartype model_format: str + :ivar model_name: Canonical provider model name available in the selected region. 
Verify + supported values per region using the Model Catalog API. Required. + :vartype model_name: str + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + model_format: str = rest_field(name="modelFormat", visibility=["read", "create"]) + """Model format as published by the provider. Verify supported formats per region using the Model + Catalog API. Required.""" + model_name: str = rest_field(name="modelName", visibility=["read", "create"]) + """Canonical provider model name available in the selected region. Verify supported values per + region using the Model Catalog API. Required.""" + + @overload + def __init__( + self, + *, + model_format: str, + model_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorAdditionalInfo(_Model): + """The resource management error additional info. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: any + """ + + type: Optional[str] = rest_field(visibility=["read"]) + """The additional info type.""" + info: Optional[Any] = rest_field(visibility=["read"]) + """The additional info.""" + + +class ErrorDetail(_Model): + """The error detail. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.discovery.models.ErrorDetail] + :ivar additional_info: The error additional info. + :vartype additional_info: list[~azure.mgmt.discovery.models.ErrorAdditionalInfo] + """ + + code: Optional[str] = rest_field(visibility=["read"]) + """The error code.""" + message: Optional[str] = rest_field(visibility=["read"]) + """The error message.""" + target: Optional[str] = rest_field(visibility=["read"]) + """The error target.""" + details: Optional[list["_models.ErrorDetail"]] = rest_field(visibility=["read"]) + """The error details.""" + additional_info: Optional[list["_models.ErrorAdditionalInfo"]] = rest_field( + name="additionalInfo", visibility=["read"] + ) + """The error additional info.""" + + +class ErrorResponse(_Model): + """Error response. + + :ivar error: The error object. + :vartype error: ~azure.mgmt.discovery.models.ErrorDetail + """ + + error: Optional["_models.ErrorDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error object.""" + + @overload + def __init__( + self, + *, + error: Optional["_models.ErrorDetail"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Identity(_Model): + """For user assigned identity resource property. + + :ivar id: The resource ID of the user assigned identity. Required. + :vartype id: str + :ivar principal_id: The principal ID of the assigned identity. 
+ :vartype principal_id: str + :ivar client_id: The client ID of the assigned identity. + :vartype client_id: str + """ + + id: str = rest_field(visibility=["read", "create", "update"]) + """The resource ID of the user assigned identity. Required.""" + principal_id: Optional[str] = rest_field(name="principalId", visibility=["read"]) + """The principal ID of the assigned identity.""" + client_id: Optional[str] = rest_field(name="clientId", visibility=["read"]) + """The client ID of the assigned identity.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class KeyVaultProperties(_Model): + """For Key Vault Key references. + + :ivar key_vault_uri: The Key Vault URI. Required. + :vartype key_vault_uri: str + :ivar key_name: The Key Name in Key Vault. Required. + :vartype key_name: str + :ivar key_version: The Key Version in Key Vault. + :vartype key_version: str + """ + + key_vault_uri: str = rest_field(name="keyVaultUri", visibility=["read", "create"]) + """The Key Vault URI. Required.""" + key_name: str = rest_field(name="keyName", visibility=["read", "create", "update"]) + """The Key Name in Key Vault. Required.""" + key_version: Optional[str] = rest_field(name="keyVersion", visibility=["read", "create", "update"]) + """The Key Version in Key Vault.""" + + @overload + def __init__( + self, + *, + key_vault_uri: str, + key_name: str, + key_version: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MoboBrokerResource(_Model): + """Managed-On-Behalf-Of broker resource. This resource is created by the Resource Provider to + manage some resources on behalf of the user. + + :ivar id: Resource identifier of a Managed-On-Behalf-Of broker resource. + :vartype id: str + """ + + id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource identifier of a Managed-On-Behalf-Of broker resource.""" + + @overload + def __init__( + self, + *, + id: Optional[str] = None, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class NodePool(TrackedResource): + """NodePool tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. 
+ :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.NodePoolProperties + """ + + properties: Optional["_models.NodePoolProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.NodePoolProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class NodePoolProperties(_Model): + """NodePool properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar subnet_id: The node pool subnet. Required. + :vartype subnet_id: str + :ivar vm_size: The size of the underlying Azure VM. Required. Known values are: + "Standard_NC24ads_A100_v4", "Standard_NC48ads_A100_v4", "Standard_NC96ads_A100_v4", + "Standard_NC4as_T4_v3", "Standard_NC8as_T4_v3", "Standard_NC16as_T4_v3", + "Standard_NC64as_T4_v3", "Standard_NV6ads_A10_v5", "Standard_NV12ads_A10_v5", + "Standard_NV24ads_A10_v5", "Standard_NV36ads_A10_v5", "Standard_NV36adms_A10_v5", + "Standard_NV72ads_A10_v5", and "Standard_ND40rs_v2". + :vartype vm_size: str or ~azure.mgmt.discovery.models.VmSize + :ivar max_node_count: The maximum number of nodes. Required. + :vartype max_node_count: int + :ivar min_node_count: The minimum number of nodes. + :vartype min_node_count: int + :ivar scale_set_priority: The Virtual Machine Scale Set priority. If not specified, the default + is 'Regular'. Known values are: "Regular" and "Spot". + :vartype scale_set_priority: str or ~azure.mgmt.discovery.models.ScaleSetPriority + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + subnet_id: str = rest_field(name="subnetId", visibility=["read", "create"]) + """The node pool subnet. Required.""" + vm_size: Union[str, "_models.VmSize"] = rest_field(name="vmSize", visibility=["read", "create"]) + """The size of the underlying Azure VM. Required. Known values are: \"Standard_NC24ads_A100_v4\", + \"Standard_NC48ads_A100_v4\", \"Standard_NC96ads_A100_v4\", \"Standard_NC4as_T4_v3\", + \"Standard_NC8as_T4_v3\", \"Standard_NC16as_T4_v3\", \"Standard_NC64as_T4_v3\", + \"Standard_NV6ads_A10_v5\", \"Standard_NV12ads_A10_v5\", \"Standard_NV24ads_A10_v5\", + \"Standard_NV36ads_A10_v5\", \"Standard_NV36adms_A10_v5\", \"Standard_NV72ads_A10_v5\", and + \"Standard_ND40rs_v2\".""" + max_node_count: int = rest_field(name="maxNodeCount", visibility=["read", "create", "update"]) + """The maximum number of nodes. 
Required.""" + min_node_count: Optional[int] = rest_field(name="minNodeCount", visibility=["read", "create", "update"]) + """The minimum number of nodes.""" + scale_set_priority: Optional[Union[str, "_models.ScaleSetPriority"]] = rest_field( + name="scaleSetPriority", visibility=["read", "create"] + ) + """The Virtual Machine Scale Set priority. If not specified, the default is 'Regular'. Known + values are: \"Regular\" and \"Spot\".""" + + @overload + def __init__( + self, + *, + subnet_id: str, + vm_size: Union[str, "_models.VmSize"], + max_node_count: int, + min_node_count: Optional[int] = None, + scale_set_priority: Optional[Union[str, "_models.ScaleSetPriority"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Operation(_Model): + """REST API Operation. + + :ivar name: The name of the operation, as per Resource-Based Access Control (RBAC). Examples: + "Microsoft.Compute/virtualMachines/write", "Microsoft.Compute/virtualMachines/capture/action". + :vartype name: str + :ivar is_data_action: Whether the operation applies to data-plane. This is "true" for + data-plane operations and "false" for Azure Resource Manager/control-plane operations. + :vartype is_data_action: bool + :ivar display: Localized display information for this particular operation. + :vartype display: ~azure.mgmt.discovery.models.OperationDisplay + :ivar origin: The intended executor of the operation; as in Resource Based Access Control + (RBAC) and audit logs UX. Default value is "user,system". Known values are: "user", "system", + and "user,system". + :vartype origin: str or ~azure.mgmt.discovery.models.Origin + :ivar action_type: Extensible enum. Indicates the action type. "Internal" refers to actions + that are for internal only APIs. "Internal" + :vartype action_type: str or ~azure.mgmt.discovery.models.ActionType + """ + + name: Optional[str] = rest_field(visibility=["read"]) + """The name of the operation, as per Resource-Based Access Control (RBAC). Examples: + \"Microsoft.Compute/virtualMachines/write\", + \"Microsoft.Compute/virtualMachines/capture/action\".""" + is_data_action: Optional[bool] = rest_field(name="isDataAction", visibility=["read"]) + """Whether the operation applies to data-plane. This is \"true\" for data-plane operations and + \"false\" for Azure Resource Manager/control-plane operations.""" + display: Optional["_models.OperationDisplay"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Localized display information for this particular operation.""" + origin: Optional[Union[str, "_models.Origin"]] = rest_field(visibility=["read"]) + """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit + logs UX. Default value is \"user,system\". Known values are: \"user\", \"system\", and + \"user,system\".""" + action_type: Optional[Union[str, "_models.ActionType"]] = rest_field(name="actionType", visibility=["read"]) + """Extensible enum. Indicates the action type. \"Internal\" refers to actions that are for + internal only APIs. \"Internal\"""" + + @overload + def __init__( + self, + *, + display: Optional["_models.OperationDisplay"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OperationDisplay(_Model): + """Localized display information for an operation. + + :ivar provider: The localized friendly form of the resource provider name, e.g. "Microsoft + Monitoring Insights" or "Microsoft Compute". + :vartype provider: str + :ivar resource: The localized friendly name of the resource type related to this operation. + E.g. "Virtual Machines" or "Job Schedule Collections". + :vartype resource: str + :ivar operation: The concise, localized friendly name for the operation; suitable for + dropdowns. E.g. "Create or Update Virtual Machine", "Restart Virtual Machine". + :vartype operation: str + :ivar description: The short, localized friendly description of the operation; suitable for + tool tips and detailed views. + :vartype description: str + """ + + provider: Optional[str] = rest_field(visibility=["read"]) + """The localized friendly form of the resource provider name, e.g. \"Microsoft Monitoring + Insights\" or \"Microsoft Compute\".""" + resource: Optional[str] = rest_field(visibility=["read"]) + """The localized friendly name of the resource type related to this operation. E.g. \"Virtual + Machines\" or \"Job Schedule Collections\".""" + operation: Optional[str] = rest_field(visibility=["read"]) + """The concise, localized friendly name for the operation; suitable for dropdowns. E.g. \"Create + or Update Virtual Machine\", \"Restart Virtual Machine\".""" + description: Optional[str] = rest_field(visibility=["read"]) + """The short, localized friendly description of the operation; suitable for tool tips and detailed + views.""" + + +class PrivateEndpoint(_Model): + """The private endpoint resource. + + :ivar id: The resource identifier of the private endpoint. + :vartype id: str + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """The resource identifier of the private endpoint.""" + + +class PrivateEndpointConnection(Resource): + """The private endpoint connection resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar properties: The private endpoint connection properties. + :vartype properties: ~azure.mgmt.discovery.models.PrivateEndpointConnectionProperties + """ + + properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The private endpoint connection properties.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateEndpointConnectionProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateEndpointConnectionProperties(_Model): + """Properties of the private endpoint connection. + + :ivar group_ids: The group ids for the private endpoint resource. + :vartype group_ids: list[str] + :ivar private_endpoint: The private endpoint resource. + :vartype private_endpoint: ~azure.mgmt.discovery.models.PrivateEndpoint + :ivar private_link_service_connection_state: A collection of information about the state of the + connection between service consumer and provider. Required. + :vartype private_link_service_connection_state: + ~azure.mgmt.discovery.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Known values are: "Succeeded", "Creating", "Deleting", and "Failed". + :vartype provisioning_state: str or + ~azure.mgmt.discovery.models.PrivateEndpointConnectionProvisioningState + """ + + group_ids: Optional[list[str]] = rest_field(name="groupIds", visibility=["read"]) + """The group ids for the private endpoint resource.""" + private_endpoint: Optional["_models.PrivateEndpoint"] = rest_field( + name="privateEndpoint", visibility=["read", "create", "update", "delete", "query"] + ) + """The private endpoint resource.""" + private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState" = rest_field( + name="privateLinkServiceConnectionState", visibility=["read", "create", "update", "delete", "query"] + ) + """A collection of information about the state of the connection between service consumer and + provider. Required.""" + provisioning_state: Optional[Union[str, "_models.PrivateEndpointConnectionProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The provisioning state of the private endpoint connection resource. Known values are: + \"Succeeded\", \"Creating\", \"Deleting\", and \"Failed\".""" + + @overload + def __init__( + self, + *, + private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState", + private_endpoint: Optional["_models.PrivateEndpoint"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateLinkResourceProperties(_Model): + """Properties of a private link resource. + + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :ivar required_zone_names: The private link resource private link DNS zone name. + :vartype required_zone_names: list[str] + """ + + group_id: Optional[str] = rest_field(name="groupId", visibility=["read"]) + """The private link resource group id.""" + required_members: Optional[list[str]] = rest_field(name="requiredMembers", visibility=["read"]) + """The private link resource required member names.""" + required_zone_names: Optional[list[str]] = rest_field( + name="requiredZoneNames", visibility=["read", "create", "update", "delete", "query"] + ) + """The private link resource private link DNS zone name.""" + + @overload + def __init__( + self, + *, + required_zone_names: Optional[list[str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateLinkServiceConnectionState(_Model): + """A collection of information about the state of the connection between service consumer and + provider. + + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Known values are: "Pending", "Approved", and "Rejected". + :vartype status: str or ~azure.mgmt.discovery.models.PrivateEndpointServiceConnectionStatus + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :vartype actions_required: str + """ + + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether the connection has been Approved/Rejected/Removed by the owner of the + service. Known values are: \"Pending\", \"Approved\", and \"Rejected\".""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The reason for approval/rejection of the connection.""" + actions_required: Optional[str] = rest_field( + name="actionsRequired", visibility=["read", "create", "update", "delete", "query"] + ) + """A message indicating if changes on the service provider require any updates on the consumer.""" + + @overload + def __init__( + self, + *, + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, + description: Optional[str] = None, + actions_required: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Project(TrackedResource): + """Project tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.ProjectProperties + """ + + properties: Optional["_models.ProjectProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.ProjectProperties"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProjectProperties(_Model): + """Project properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar foundry_project_endpoint: Foundry project endpoint URI. + :vartype foundry_project_endpoint: str + :ivar storage_container_ids: Allowed StorageContainers (Control plane resource references). + :vartype storage_container_ids: list[str] + :ivar settings: Settings for the project. + :vartype settings: ~azure.mgmt.discovery.models.ProjectSettings + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + foundry_project_endpoint: Optional[str] = rest_field(name="foundryProjectEndpoint", visibility=["read"]) + """Foundry project endpoint URI.""" + storage_container_ids: Optional[list[str]] = rest_field(name="storageContainerIds", visibility=["read", "create"]) + """Allowed StorageContainers (Control plane resource references).""" + settings: Optional["_models.ProjectSettings"] = rest_field(visibility=["read", "create", "update"]) + """Settings for the project.""" + + @overload + def __init__( + self, + *, + storage_container_ids: Optional[list[str]] = None, + settings: Optional["_models.ProjectSettings"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProjectSettings(_Model): + """Settings schema for the project. + + :ivar behavior_preferences: Default preferences to guide AI behaviors in this project. + :vartype behavior_preferences: str + """ + + behavior_preferences: Optional[str] = rest_field( + name="behaviorPreferences", visibility=["read", "create", "update", "delete", "query"] + ) + """Default preferences to guide AI behaviors in this project.""" + + @overload + def __init__( + self, + *, + behavior_preferences: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAsset(TrackedResource): + """Storage Asset tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
+ :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.StorageAssetProperties + """ + + properties: Optional["_models.StorageAssetProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.StorageAssetProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageAssetProperties(_Model): + """Storage Asset properties. + + :ivar description: The description. Required. + :vartype description: str + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar path: The path to the data within its parent container. This should be relative to the + root of the parent container. + :vartype path: str + """ + + description: str = rest_field(visibility=["read", "create", "update"]) + """The description. Required.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + path: Optional[str] = rest_field(visibility=["read", "create"]) + """The path to the data within its parent container. This should be relative to the root of the + parent container.""" + + @overload + def __init__( + self, + *, + description: str, + path: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageContainer(TrackedResource): + """Storage Container tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. 
+ :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.StorageContainerProperties + """ + + properties: Optional["_models.StorageContainerProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.StorageContainerProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageContainerProperties(_Model): + """Storage Container properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar storage_store: Storage store properties. Required. + :vartype storage_store: ~azure.mgmt.discovery.models.StorageStore + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + storage_store: "_models.StorageStore" = rest_field(name="storageStore", visibility=["read", "create"]) + """Storage store properties. Required.""" + + @overload + def __init__( + self, + *, + storage_store: "_models.StorageStore", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Supercomputer(TrackedResource): + """Supercomputer tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.SupercomputerProperties + """ + + properties: Optional["_models.SupercomputerProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.SupercomputerProperties"] = None, + ) -> None: ... 
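The storage models above follow the package's two-overload pattern: typed keyword construction or a single mapping of raw REST JSON. A minimal sketch, assuming `StorageAsset` and `StorageAssetProperties` are re-exported from `azure.mgmt.discovery.models` (the re-export itself is not shown in this diff):

```python
from azure.mgmt.discovery.models import StorageAsset, StorageAssetProperties

asset = StorageAsset(
    location="eastus",  # required geo-location for tracked resources
    tags={"env": "dev"},
    properties=StorageAssetProperties(
        description="Raw experiment inputs",  # required
        path="datasets/run-01",               # relative to the root of the parent container
    ),
)

# The same model can also be built from raw REST JSON via the Mapping overload:
asset_from_json = StorageAsset(
    {"location": "eastus", "properties": {"description": "Raw experiment inputs"}}
)
```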
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SupercomputerIdentities(_Model): + """Dictionary of identity properties for the Supercomputer. + + :ivar cluster_identity: Cluster identity ID. Required. + :vartype cluster_identity: ~azure.mgmt.discovery.models.Identity + :ivar kubelet_identity: Kubelet identity ID used by the supercomputer. This identity is used by + the supercomputer at node level to access Azure resources. This identity must have + ManagedIdentityOperator role on the clusterIdentity. Required. + :vartype kubelet_identity: ~azure.mgmt.discovery.models.Identity + :ivar workload_identities: User assigned identity IDs to be used by workloads as federated + credentials running on supercomputer. The key value must be the resource ID of the identity + resource. + :vartype workload_identities: dict[str, ~azure.mgmt.discovery.models.UserAssignedIdentity] + """ + + cluster_identity: "_models.Identity" = rest_field(name="clusterIdentity", visibility=["read", "create"]) + """Cluster identity ID. Required.""" + kubelet_identity: "_models.Identity" = rest_field(name="kubeletIdentity", visibility=["read", "create"]) + """Kubelet identity ID used by the supercomputer. This identity is used by the supercomputer at + node level to access Azure resources. This identity must have ManagedIdentityOperator role on + the clusterIdentity. Required.""" + workload_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = rest_field( + name="workloadIdentities", visibility=["read", "create", "update"] + ) + """User assigned identity IDs to be used by workloads as federated credentials running on + supercomputer. The key value must be the resource ID of the identity resource.""" + + @overload + def __init__( + self, + *, + cluster_identity: "_models.Identity", + kubelet_identity: "_models.Identity", + workload_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SupercomputerProperties(_Model): + """Supercomputer properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar subnet_id: System Subnet ID associated with managed NodePool for system resources. It + should have connectivity to the child NodePool subnets. Required. + :vartype subnet_id: str + :ivar management_subnet_id: System Subnet ID associated with AKS apiserver. Must be delegated + to Microsoft.ContainerService/managedClusters. It should have connectivity to the system subnet + and nodepool subnets. + :vartype management_subnet_id: str + :ivar outbound_type: Network egress type provisioned for the supercomputer workloads. Defaults + to LoadBalancer if not specified. If None is specified, the customer is responsible for + providing outbound connectivity for Supercomputer functionality. Known values are: + "LoadBalancer" and "None". 
+ :vartype outbound_type: str or ~azure.mgmt.discovery.models.NetworkEgressType + :ivar system_sku: The SKU to use for the system node pool. Known values are: "Standard_D4s_v6", + "Standard_D4s_v5", and "Standard_D4s_v4". + :vartype system_sku: str or ~azure.mgmt.discovery.models.SystemSku + :ivar identities: Dictionary of identity properties. Required. + :vartype identities: ~azure.mgmt.discovery.models.SupercomputerIdentities + :ivar customer_managed_keys: Whether or not to use a customer managed key when encrypting data + at rest. Known values are: "Enabled" and "Disabled". + :vartype customer_managed_keys: str or ~azure.mgmt.discovery.models.CustomerManagedKeys + :ivar disk_encryption_set_id: Disk Encryption Set ID to use for Customer Managed Keys + encryption. Required if Customer Managed Keys is enabled. + :vartype disk_encryption_set_id: str + :ivar log_analytics_cluster_id: The Log Analytics Cluster to use for debug logs. This is + required when Customer Managed Keys are enabled. + :vartype log_analytics_cluster_id: str + :ivar managed_resource_group: The resource group for resources managed on behalf of customer. + :vartype managed_resource_group: str + :ivar managed_on_behalf_of_configuration: Managed-On-Behalf-Of configuration properties. This + configuration exists for the resources where a resource provider manages those resources on + behalf of the resource owner. + :vartype managed_on_behalf_of_configuration: + ~azure.mgmt.discovery.models.WithMoboBrokerResources + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + subnet_id: str = rest_field(name="subnetId", visibility=["read", "create"]) + """System Subnet ID associated with managed NodePool for system resources. It should have + connectivity to the child NodePool subnets. Required.""" + management_subnet_id: Optional[str] = rest_field(name="managementSubnetId", visibility=["read", "create"]) + """System Subnet ID associated with AKS apiserver. Must be delegated to + Microsoft.ContainerService/managedClusters. It should have connectivity to the system subnet + and nodepool subnets.""" + outbound_type: Optional[Union[str, "_models.NetworkEgressType"]] = rest_field( + name="outboundType", visibility=["read", "create"] + ) + """Network egress type provisioned for the supercomputer workloads. Defaults to LoadBalancer if + not specified. If None is specified, the customer is responsible for providing outbound + connectivity for Supercomputer functionality. Known values are: \"LoadBalancer\" and \"None\".""" + system_sku: Optional[Union[str, "_models.SystemSku"]] = rest_field(name="systemSku", visibility=["read", "create"]) + """The SKU to use for the system node pool. Known values are: \"Standard_D4s_v6\", + \"Standard_D4s_v5\", and \"Standard_D4s_v4\".""" + identities: "_models.SupercomputerIdentities" = rest_field(visibility=["read", "create", "update"]) + """Dictionary of identity properties. Required.""" + customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = rest_field( + name="customerManagedKeys", visibility=["read", "create"] + ) + """Whether or not to use a customer managed key when encrypting data at rest. 
Known values are: + \"Enabled\" and \"Disabled\".""" + disk_encryption_set_id: Optional[str] = rest_field(name="diskEncryptionSetId", visibility=["read", "create"]) + """Disk Encryption Set ID to use for Customer Managed Keys encryption. Required if Customer + Managed Keys is enabled.""" + log_analytics_cluster_id: Optional[str] = rest_field(name="logAnalyticsClusterId", visibility=["read", "create"]) + """The Log Analytics Cluster to use for debug logs. This is required when Customer Managed Keys + are enabled.""" + managed_resource_group: Optional[str] = rest_field(name="managedResourceGroup", visibility=["read"]) + """The resource group for resources managed on behalf of customer.""" + managed_on_behalf_of_configuration: Optional["_models.WithMoboBrokerResources"] = rest_field( + name="managedOnBehalfOfConfiguration", visibility=["read"] + ) + """Managed-On-Behalf-Of configuration properties. This configuration exists for the resources + where a resource provider manages those resources on behalf of the resource owner.""" + + @overload + def __init__( + self, + *, + subnet_id: str, + identities: "_models.SupercomputerIdentities", + management_subnet_id: Optional[str] = None, + outbound_type: Optional[Union[str, "_models.NetworkEgressType"]] = None, + system_sku: Optional[Union[str, "_models.SystemSku"]] = None, + customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = None, + disk_encryption_set_id: Optional[str] = None, + log_analytics_cluster_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SystemData(_Model): + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.mgmt.discovery.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.discovery.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). + :vartype last_modified_at: ~datetime.datetime + """ + + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read", "create", "update", "delete", "query"]) + """The identity that created the resource.""" + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="createdByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of identity that created the resource. 
Known values are: \"User\", \"Application\", + \"ManagedIdentity\", and \"Key\".""" + created_at: Optional[datetime.datetime] = rest_field( + name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp of resource creation (UTC).""" + last_modified_by: Optional[str] = rest_field( + name="lastModifiedBy", visibility=["read", "create", "update", "delete", "query"] + ) + """The identity that last modified the resource.""" + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="lastModifiedByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of identity that last modified the resource. Known values are: \"User\", + \"Application\", \"ManagedIdentity\", and \"Key\".""" + last_modified_at: Optional[datetime.datetime] = rest_field( + name="lastModifiedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp of resource last modification (UTC).""" + + @overload + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Tool(TrackedResource): + """Tool tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.ToolProperties + """ + + properties: Optional["_models.ToolProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.ToolProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolProperties(_Model): + """Discovery Tool list item properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". 
+ :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar version: The version of a resource definition. Required. + :vartype version: str + :ivar environment_variables: Environment variables to make available. + :vartype environment_variables: dict[str, str] + :ivar definition_content: The JSON content for defining a resource. Required. + :vartype definition_content: dict[str, any] + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + version: str = rest_field(visibility=["read", "create", "update"]) + """The version of a resource definition. Required.""" + environment_variables: Optional[dict[str, str]] = rest_field( + name="environmentVariables", visibility=["read", "create", "update"] + ) + """Environment variables to make available.""" + definition_content: dict[str, Any] = rest_field(name="definitionContent", visibility=["read", "create", "update"]) + """The JSON content for defining a resource. Required.""" + + @overload + def __init__( + self, + *, + version: str, + definition_content: dict[str, Any], + environment_variables: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class UserAssignedIdentity(_Model): + """User assigned identity properties. + + :ivar principal_id: The principal ID of the assigned identity. + :vartype principal_id: str + :ivar client_id: The client ID of the assigned identity. + :vartype client_id: str + """ + + principal_id: Optional[str] = rest_field(name="principalId", visibility=["read"]) + """The principal ID of the assigned identity.""" + client_id: Optional[str] = rest_field(name="clientId", visibility=["read"]) + """The client ID of the assigned identity.""" + + +class WithMoboBrokerResources(_Model): + """For tracking mobo resources. + + :ivar mobo_broker_resources: Managed-On-Behalf-Of broker resources. + :vartype mobo_broker_resources: list[~azure.mgmt.discovery.models.MoboBrokerResource] + """ + + mobo_broker_resources: Optional[list["_models.MoboBrokerResource"]] = rest_field( + name="moboBrokerResources", visibility=["read"] + ) + """Managed-On-Behalf-Of broker resources.""" + + +class Workspace(TrackedResource): + """Workspace tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. 
+ :vartype properties: ~azure.mgmt.discovery.models.WorkspaceProperties + """ + + properties: Optional["_models.WorkspaceProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.WorkspaceProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WorkspacePrivateEndpointConnection(ProxyResource): + """The Private Endpoint Connection resource for Workspace. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.PrivateEndpointConnectionProperties + """ + + properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateEndpointConnectionProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WorkspacePrivateLinkResource(ProxyResource): + """A private link resource for Workspace. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.PrivateLinkResourceProperties + """ + + properties: Optional["_models.PrivateLinkResourceProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateLinkResourceProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WorkspaceProperties(_Model): + """Workspace properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar supercomputer_ids: List of linked SuperComputers. + :vartype supercomputer_ids: list[str] + :ivar workspace_api_uri: workspace API endpoint Uri. + :vartype workspace_api_uri: str + :ivar workspace_ui_uri: workspace User Interface Uri. + :vartype workspace_ui_uri: str + :ivar workspace_identity: Identity IDs used for leveraging Workspace resources. Required. + :vartype workspace_identity: ~azure.mgmt.discovery.models.Identity + :ivar customer_managed_keys: Whether or not to use a customer managed key when encrypting data + at rest. Known values are: "Enabled" and "Disabled". + :vartype customer_managed_keys: str or ~azure.mgmt.discovery.models.CustomerManagedKeys + :ivar key_vault_properties: The key to use for encrypting data at rest when customer managed + keys are enabled. + :vartype key_vault_properties: ~azure.mgmt.discovery.models.KeyVaultProperties + :ivar log_analytics_cluster_id: The Log Analytics Cluster to use for debug logs. This is + required when Customer Managed Keys are enabled. + :vartype log_analytics_cluster_id: str + :ivar private_endpoint_connections: List of private endpoint connections. + :vartype private_endpoint_connections: + list[~azure.mgmt.discovery.models.PrivateEndpointConnection] + :ivar public_network_access: Whether or not public network access is allowed for this resource. + For security reasons, it is recommended to disable it whenever possible. Known values are: + "Enabled" and "Disabled". + :vartype public_network_access: str or ~azure.mgmt.discovery.models.PublicNetworkAccess + :ivar agent_subnet_id: Agent Subnet ID for agent resources. + :vartype agent_subnet_id: str + :ivar private_endpoint_subnet_id: Private Endpoint Subnet ID for private endpoint connections. + :vartype private_endpoint_subnet_id: str + :ivar workspace_subnet_id: Function Subnet ID for workspace resources. + :vartype workspace_subnet_id: str + :ivar managed_resource_group: The resource group for resources managed on behalf of customer. + :vartype managed_resource_group: str + :ivar managed_on_behalf_of_configuration: Managed-On-Behalf-Of configuration properties. This + configuration exists for the resources where a resource provider manages those resources on + behalf of the resource owner. + :vartype managed_on_behalf_of_configuration: + ~azure.mgmt.discovery.models.WithMoboBrokerResources + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. 
Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + supercomputer_ids: Optional[list[str]] = rest_field( + name="supercomputerIds", visibility=["read", "create", "update"] + ) + """List of linked SuperComputers.""" + workspace_api_uri: Optional[str] = rest_field(name="workspaceApiUri", visibility=["read"]) + """workspace API endpoint Uri.""" + workspace_ui_uri: Optional[str] = rest_field(name="workspaceUiUri", visibility=["read"]) + """workspace User Interface Uri.""" + workspace_identity: "_models.Identity" = rest_field(name="workspaceIdentity", visibility=["read", "create"]) + """Identity IDs used for leveraging Workspace resources. Required.""" + customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = rest_field( + name="customerManagedKeys", visibility=["read", "create"] + ) + """Whether or not to use a customer managed key when encrypting data at rest. Known values are: + \"Enabled\" and \"Disabled\".""" + key_vault_properties: Optional["_models.KeyVaultProperties"] = rest_field( + name="keyVaultProperties", visibility=["read", "create", "update"] + ) + """The key to use for encrypting data at rest when customer managed keys are enabled.""" + log_analytics_cluster_id: Optional[str] = rest_field(name="logAnalyticsClusterId", visibility=["read", "create"]) + """The Log Analytics Cluster to use for debug logs. This is required when Customer Managed Keys + are enabled.""" + private_endpoint_connections: Optional[list["_models.PrivateEndpointConnection"]] = rest_field( + name="privateEndpointConnections", visibility=["read"] + ) + """List of private endpoint connections.""" + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field( + name="publicNetworkAccess", visibility=["read", "create", "update"] + ) + """Whether or not public network access is allowed for this resource. For security reasons, it is + recommended to disable it whenever possible. Known values are: \"Enabled\" and \"Disabled\".""" + agent_subnet_id: Optional[str] = rest_field(name="agentSubnetId", visibility=["read", "create"]) + """Agent Subnet ID for agent resources.""" + private_endpoint_subnet_id: Optional[str] = rest_field( + name="privateEndpointSubnetId", visibility=["read", "create"] + ) + """Private Endpoint Subnet ID for private endpoint connections.""" + workspace_subnet_id: Optional[str] = rest_field(name="workspaceSubnetId", visibility=["read", "create"]) + """Function Subnet ID for workspace resources.""" + managed_resource_group: Optional[str] = rest_field(name="managedResourceGroup", visibility=["read"]) + """The resource group for resources managed on behalf of customer.""" + managed_on_behalf_of_configuration: Optional["_models.WithMoboBrokerResources"] = rest_field( + name="managedOnBehalfOfConfiguration", visibility=["read"] + ) + """Managed-On-Behalf-Of configuration properties. 
This configuration exists for the resources + where a resource provider manages those resources on behalf of the resource owner.""" + + @overload + def __init__( + self, + *, + workspace_identity: "_models.Identity", + supercomputer_ids: Optional[list[str]] = None, + customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = None, + key_vault_properties: Optional["_models.KeyVaultProperties"] = None, + log_analytics_cluster_id: Optional[str] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + agent_subnet_id: Optional[str] = None, + private_endpoint_subnet_id: Optional[str] = None, + workspace_subnet_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_patch.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/__init__.py new file mode 100644 index 000000000000..6a22eb1590c8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/__init__.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
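The workspace models follow the same construction pattern. A hedged sketch using only REST field names visible above; the `Identity` payload is left empty because its schema does not appear in this part of the diff:

```python
from azure.mgmt.discovery import models as discovery_models  # import path assumed

workspace = discovery_models.Workspace(
    {
        "location": "eastus",
        "properties": {
            "workspaceIdentity": {},  # required; Identity schema not shown in this section
            "supercomputerIds": ["/subscriptions/.../supercomputers/sc1"],
            "publicNetworkAccess": "Disabled",  # recommended default per the field docstring
        },
    }
)
```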
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import Operations # type: ignore +from ._operations import BookshelvesOperations # type: ignore +from ._operations import BookshelfPrivateEndpointConnectionsOperations # type: ignore +from ._operations import BookshelfPrivateLinkResourcesOperations # type: ignore +from ._operations import ToolsOperations # type: ignore +from ._operations import ProjectsOperations # type: ignore +from ._operations import WorkspacesOperations # type: ignore +from ._operations import WorkspacePrivateEndpointConnectionsOperations # type: ignore +from ._operations import ChatModelDeploymentsOperations # type: ignore +from ._operations import WorkspacePrivateLinkResourcesOperations # type: ignore +from ._operations import NodePoolsOperations # type: ignore +from ._operations import SupercomputersOperations # type: ignore +from ._operations import StorageAssetsOperations # type: ignore +from ._operations import StorageContainersOperations # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "Operations", + "BookshelvesOperations", + "BookshelfPrivateEndpointConnectionsOperations", + "BookshelfPrivateLinkResourcesOperations", + "ToolsOperations", + "ProjectsOperations", + "WorkspacesOperations", + "WorkspacePrivateEndpointConnectionsOperations", + "ChatModelDeploymentsOperations", + "WorkspacePrivateLinkResourcesOperations", + "NodePoolsOperations", + "SupercomputersOperations", + "StorageAssetsOperations", + "StorageContainersOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_operations.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_operations.py new file mode 100644 index 000000000000..ca47c6cd7b3d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_operations.py @@ -0,0 +1,11023 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
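The operation groups exported above are normally reached as attributes of the management client rather than instantiated directly. A sketch under that assumption; the attribute and method names are inferred from the exported class names and the request builders that follow, not confirmed by this file alone:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.discovery import DiscoveryMgmtClient

client = DiscoveryMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# Attribute names below (bookshelves, tools) are assumed from BookshelvesOperations
# and ToolsOperations; method names mirror the request builders defined later in this file.
for shelf in client.bookshelves.list_by_resource_group("<resource-group>"):
    print(shelf.name)

tool = client.tools.get("<resource-group>", "<tool-name>")
```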
+# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._configuration import DiscoveryMgmtClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from .._utils.serialization import Deserializer, Serializer + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +JSON = MutableMapping[str, Any] +List = list + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_operations_list_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/providers/Microsoft.Discovery/operations" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelves_get_request( + resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelves_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, bookshelf_name: str, subscription_id: 
str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelves_update_request( + resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelves_delete_request( + resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_bookshelves_list_by_resource_group_request( # pylint: disable=name-too-long + resource_group_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = ( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves" + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelves_list_by_subscription_request( # pylint: disable=name-too-long + subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/bookshelves" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelf_private_endpoint_connections_get_request( # pylint: disable=name-too-long + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}/privateEndpointConnections/{privateEndpointConnectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", 
private_endpoint_connection_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelf_private_endpoint_connections_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}/privateEndpointConnections/{privateEndpointConnectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelf_private_endpoint_connections_delete_request( # pylint: disable=name-too-long + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}/privateEndpointConnections/{privateEndpointConnectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_bookshelf_private_endpoint_connections_list_by_bookshelf_request( # pylint: 
disable=name-too-long + resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}/privateEndpointConnections" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelf_private_link_resources_get_request( # pylint: disable=name-too-long + resource_group_name: str, bookshelf_name: str, private_link_resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}/privateLinkResources/{privateLinkResourceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + "privateLinkResourceName": _SERIALIZER.url("private_link_resource_name", private_link_resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_bookshelf_private_link_resources_list_by_bookshelf_request( # pylint: disable=name-too-long + resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}/privateLinkResources" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", 
resource_group_name, "str"), + "bookshelfName": _SERIALIZER.url("bookshelf_name", bookshelf_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_tools_get_request( + resource_group_name: str, tool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/tools/{toolName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "toolName": _SERIALIZER.url("tool_name", tool_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_tools_create_or_update_request( + resource_group_name: str, tool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/tools/{toolName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "toolName": _SERIALIZER.url("tool_name", tool_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_tools_update_request( + resource_group_name: str, tool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = 
_headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/tools/{toolName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "toolName": _SERIALIZER.url("tool_name", tool_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_tools_delete_request( + resource_group_name: str, tool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/tools/{toolName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "toolName": _SERIALIZER.url("tool_name", tool_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_tools_list_by_resource_group_request( # pylint: disable=name-too-long + resource_group_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/tools" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_tools_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/tools" + path_format_arguments = { 
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_projects_get_request( + resource_group_name: str, workspace_name: str, project_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects/{projectName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_projects_create_or_update_request( + resource_group_name: str, workspace_name: str, project_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects/{projectName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_projects_update_request( + resource_group_name: str, workspace_name: str, project_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) 
or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects/{projectName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_projects_delete_request( + resource_group_name: str, workspace_name: str, project_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects/{projectName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_projects_list_by_workspace_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspaces_get_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspaces_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspaces_update_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}" + path_format_arguments = { + 
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspaces_delete_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_workspaces_list_by_resource_group_request( # pylint: disable=name-too-long + resource_group_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspaces_list_by_subscription_request( # pylint: disable=name-too-long + subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/workspaces" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspace_private_endpoint_connections_get_request( # pylint: disable=name-too-long + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspace_private_endpoint_connections_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspace_private_endpoint_connections_delete_request( # pylint: disable=name-too-long + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_workspace_private_endpoint_connections_list_by_workspace_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateEndpointConnections" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_chat_model_deployments_get_request( + resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments/{chatModelDeploymentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", 
resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "chatModelDeploymentName": _SERIALIZER.url("chat_model_deployment_name", chat_model_deployment_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_chat_model_deployments_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments/{chatModelDeploymentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "chatModelDeploymentName": _SERIALIZER.url("chat_model_deployment_name", chat_model_deployment_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_chat_model_deployments_update_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments/{chatModelDeploymentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "chatModelDeploymentName": _SERIALIZER.url("chat_model_deployment_name", chat_model_deployment_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_chat_model_deployments_delete_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments/{chatModelDeploymentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "chatModelDeploymentName": _SERIALIZER.url("chat_model_deployment_name", chat_model_deployment_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_chat_model_deployments_list_by_workspace_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspace_private_link_resources_get_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, private_link_resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateLinkResources/{privateLinkResourceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "privateLinkResourceName": _SERIALIZER.url("private_link_resource_name", private_link_resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspace_private_link_resources_list_by_workspace_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateLinkResources" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_node_pools_get_request( + resource_group_name: str, supercomputer_name: str, node_pool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools/{nodePoolName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + "nodePoolName": _SERIALIZER.url("node_pool_name", node_pool_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", 
url=_url, params=_params, headers=_headers, **kwargs) + + +def build_node_pools_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, supercomputer_name: str, node_pool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools/{nodePoolName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + "nodePoolName": _SERIALIZER.url("node_pool_name", node_pool_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_node_pools_update_request( + resource_group_name: str, supercomputer_name: str, node_pool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools/{nodePoolName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + "nodePoolName": _SERIALIZER.url("node_pool_name", node_pool_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_node_pools_delete_request( + resource_group_name: str, supercomputer_name: str, node_pool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools/{nodePoolName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + "nodePoolName": _SERIALIZER.url("node_pool_name", node_pool_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_node_pools_list_by_supercomputer_request( # pylint: disable=name-too-long + resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_supercomputers_get_request( + resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_supercomputers_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, supercomputer_name: str, 
subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_supercomputers_update_request( + resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_supercomputers_delete_request( + resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "supercomputerName": 
_SERIALIZER.url("supercomputer_name", supercomputer_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_supercomputers_list_by_resource_group_request( # pylint: disable=name-too-long + resource_group_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_supercomputers_list_by_subscription_request( # pylint: disable=name-too-long + subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/supercomputers" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_assets_get_request( + resource_group_name: str, storage_container_name: str, storage_asset_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets/{storageAssetName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"), + "storageAssetName": _SERIALIZER.url("storage_asset_name", storage_asset_name, 
"str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_assets_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, storage_container_name: str, storage_asset_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets/{storageAssetName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"), + "storageAssetName": _SERIALIZER.url("storage_asset_name", storage_asset_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_assets_update_request( + resource_group_name: str, storage_container_name: str, storage_asset_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets/{storageAssetName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"), + "storageAssetName": _SERIALIZER.url("storage_asset_name", storage_asset_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_assets_delete_request( + resource_group_name: str, storage_container_name: str, storage_asset_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets/{storageAssetName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"), + "storageAssetName": _SERIALIZER.url("storage_asset_name", storage_asset_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_storage_assets_list_by_storage_container_request( # pylint: disable=name-too-long + resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_containers_get_request( + resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", 
storage_container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_containers_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_containers_update_request( + resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_containers_delete_request( + resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: 
Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_storage_containers_list_by_resource_group_request( # pylint: disable=name-too-long + resource_group_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_storage_containers_list_by_subscription_request( # pylint: disable=name-too-long + subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/storageContainers" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`operations` attribute. 
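+
+    A minimal usage sketch of this attribute, assuming ``client`` is an
+    authenticated ``DiscoveryMgmtClient`` (the loop body is illustrative)::
+
+        for op in client.operations.list():
+            print(op.name)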
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> ItemPaged["_models.Operation"]: + """List the operations for the provider. + + :return: An iterator like instance of Operation + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_operations_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BookshelvesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`bookshelves` attribute. 
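+
+    A minimal usage sketch, assuming ``client`` is an authenticated
+    ``DiscoveryMgmtClient`` and that the resource group and bookshelf names
+    shown are illustrative::
+
+        shelf = client.bookshelves.get("my-rg", "my-bookshelf")
+        poller = client.bookshelves.begin_delete("my-rg", "my-bookshelf")
+        poller.result()  # block until the long-running delete completes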
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> _models.Bookshelf: + """Get a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: Bookshelf. The Bookshelf is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Bookshelf + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + + _request = build_bookshelves_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Bookshelf, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + resource: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = 
content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelves_create_or_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: _models.Bookshelf, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Bookshelf + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Is one of the following types: Bookshelf, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Bookshelf or JSON or IO[bytes] + :return: An instance of LROPoller that returns Bookshelf. 
The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Bookshelf, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Bookshelf].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Bookshelf]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + properties: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelves_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: _models.Bookshelf, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Bookshelf + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Bookshelf, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Bookshelf or JSON or IO[bytes] + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Bookshelf, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Bookshelf].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Bookshelf]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: 
ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_bookshelves_delete_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Bookshelf"]: + """List Bookshelf resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of Bookshelf + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Bookshelf]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelves_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Bookshelf], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Bookshelf"]: + """List Bookshelf resources by subscription ID. 
+ + :return: An iterator like instance of Bookshelf + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Bookshelf]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelves_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Bookshelf], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BookshelfPrivateEndpointConnectionsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`bookshelf_private_endpoint_connections` attribute. 
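+
+    A minimal usage sketch, assuming ``client`` is an authenticated
+    ``DiscoveryMgmtClient`` and that the resource group and bookshelf names
+    shown are illustrative::
+
+        connections = client.bookshelf_private_endpoint_connections.list_by_bookshelf(
+            "my-rg", "my-bookshelf"
+        )
+        for connection in connections:
+            print(connection.name)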
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.BookshelfPrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: BookshelfPrivateEndpointConnection. The BookshelfPrivateEndpointConnection is + compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BookshelfPrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BookshelfPrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: 
Union[_models.BookshelfPrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelf_private_endpoint_connections_create_or_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: _models.BookshelfPrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. 
+ :type resource: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BookshelfPrivateEndpointConnection. 
The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.BookshelfPrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Is one of the following types: + BookshelfPrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection or JSON or + IO[bytes] + :return: An instance of LROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BookshelfPrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BookshelfPrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.BookshelfPrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.BookshelfPrivateEndpointConnection]( + 
self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Deletes the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_bookshelf( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> ItemPaged["_models.BookshelfPrivateEndpointConnection"]: + """Lists all private endpoint connections for a bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :return: An iterator like instance of BookshelfPrivateEndpointConnection + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BookshelfPrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelf_private_endpoint_connections_list_by_bookshelf_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BookshelfPrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BookshelfPrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`bookshelf_private_link_resources` attribute. 
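+
+    A minimal usage sketch, assuming ``client`` is an authenticated
+    ``DiscoveryMgmtClient`` and that the resource group and bookshelf names
+    shown are illustrative::
+
+        for link in client.bookshelf_private_link_resources.list_by_bookshelf(
+            "my-rg", "my-bookshelf"
+        ):
+            print(link.name)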
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, bookshelf_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.BookshelfPrivateLinkResource: + """Gets the specified private link resource for the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_link_resource_name: The name of the private link associated with the Azure + resource. Required. + :type private_link_resource_name: str + :return: BookshelfPrivateLinkResource. The BookshelfPrivateLinkResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.discovery.models.BookshelfPrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BookshelfPrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_link_resources_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BookshelfPrivateLinkResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_by_bookshelf( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> ItemPaged["_models.BookshelfPrivateLinkResource"]: + """Lists all private link resources for the bookshelf. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: An iterator like instance of BookshelfPrivateLinkResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.BookshelfPrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BookshelfPrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelf_private_link_resources_list_by_bookshelf_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BookshelfPrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class ToolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`tools` attribute. 
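Like the other list operations in this file, `list_by_bookshelf` wires `prepare_request`, `extract_data`, and `get_next` into an `ItemPaged`, so callers can iterate items directly or walk page by page. A sketch under the same placeholder assumptions as the previous examples:

```python
# Item-level iteration: azure-core follows nextLink transparently.
for link_resource in client.bookshelf_private_link_resources.list_by_bookshelf(
    resource_group_name="my-rg", bookshelf_name="my-bookshelf"
):
    print(link_resource.id)

# Page-level iteration is also available on the returned ItemPaged.
for page in client.bookshelf_private_link_resources.list_by_bookshelf(
    resource_group_name="my-rg", bookshelf_name="my-bookshelf"
).by_page():
    print(len(list(page)))
```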
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> _models.Tool: + """Get a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :return: Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Tool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + + _request = build_tools_get_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Tool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, resource_group_name: str, tool_name: str, resource: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, 
bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_tools_create_or_update_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: _models.Tool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Tool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. 
The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, tool_name: str, resource: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Is one of the following types: Tool, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Tool or JSON or IO[bytes] + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Tool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Tool].from_continuation_token( + polling_method=polling_method, 
+ continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Tool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, resource_group_name: str, tool_name: str, properties: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_tools_update_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: _models.Tool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Tool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, resource_group_name: str, tool_name: str, properties: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Is one of the following types: Tool, + JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Tool or JSON or IO[bytes] + :return: An instance of LROPoller that returns Tool. 
The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Tool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Tool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Tool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_tools_delete_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Tool"]: + """List Tool resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of Tool + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Tool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_tools_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Tool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Tool"]: + """List Tool resources by subscription ID. 
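The Tools operation group defined above combines a long-running `begin_delete` with resource-group and subscription scoped list operations. A brief sketch, assuming an existing `DiscoveryMgmtClient` named `client` and placeholder names:

```python
# Placeholder names; ToolsOperations is reached via the client's `tools` attribute.

# Delete is long-running and resolves to None once the service finishes.
client.tools.begin_delete(resource_group_name="my-rg", tool_name="my-tool").result()

# Both list operations return ItemPaged[Tool].
for tool in client.tools.list_by_resource_group(resource_group_name="my-rg"):
    print(tool.name)

for tool in client.tools.list_by_subscription():
    print(tool.id)
```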
+ + :return: An iterator like instance of Tool + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Tool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_tools_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Tool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class ProjectsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`projects` attribute. 
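The Tool `begin_create_or_update` and `begin_update` methods earlier in this file accept a `Tool` model, a JSON-compatible dict, or raw bytes, and return `LROPoller[Tool]`; the scaffolding also supports resuming a poller from a continuation token. A minimal sketch, assuming an existing `client` and a hypothetical request body (the real payload shape is not shown in this section):

```python
# Assumed payload shape; begin_create_or_update also accepts a Tool model or IO[bytes].
body = {"location": "eastus", "properties": {}}

poller = client.tools.begin_create_or_update(
    resource_group_name="my-rg",
    tool_name="my-tool",
    resource=body,
)

# The poller state can be captured and resumed later via the continuation_token kwarg.
token = poller.continuation_token()
resumed = client.tools.begin_create_or_update(
    resource_group_name="my-rg",
    tool_name="my-tool",
    resource=body,  # still required by the signature, but no new initial request is issued
    continuation_token=token,
)
tool = resumed.result()
print(tool.id)
```

As the generated code shows, when `continuation_token` is supplied the initial request is skipped and the poller is rebuilt with `LROPoller.from_continuation_token`, which is why the body argument is not re-sent.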
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any) -> _models.Project: + """Get a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :return: Project. The Project is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Project + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + + _request = build_projects_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Project, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_projects_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: _models.Project, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Project + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Is one of the following types: Project, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Project or JSON or IO[bytes] + :return: An instance of LROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Project, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Project].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Project]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_projects_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + 
_decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: _models.Project, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Project + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Project, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Project or JSON or IO[bytes] + :return: An instance of LROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Project, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Project].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Project]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_projects_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.Project"]: + """List Project resources by Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of Project + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Project]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_projects_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Project], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`workspaces` attribute. 
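`list_by_workspace` returns an `ItemPaged` that follows `nextLink` pages transparently, and `begin_delete` is a body-less LRO. A short illustrative sketch (not part of the generated module; resource names are placeholders and the `projects` attribute name is assumed from the operations-group pattern):

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.discovery import DiscoveryMgmtClient

client = DiscoveryMgmtClient(credential=DefaultAzureCredential(), subscription_id="<subscription-id>")

# Iterating the pager yields Project resources across all pages.
for project in client.projects.list_by_workspace(
    resource_group_name="my-rg", workspace_name="my-workspace"
):
    print(project["name"])

# Deletion returns an LROPoller[None]; result() simply waits for completion.
client.projects.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    project_name="my-project",
).result()
```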
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: + """Get a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: Workspace. The Workspace is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Workspace + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + + _request = build_workspaces_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Workspace, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + resource: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = 
content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspaces_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Is one of the following types: Workspace, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Workspace or JSON or IO[bytes] + :return: An instance of LROPoller that returns Workspace. 
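A hypothetical sketch of creating a Workspace with the JSON-mapping overload and then reading it back with `get`. This is illustrative only: the `location`/`properties` body shape is an assumption based on ARM tracked-resource conventions, not taken from this package's models.

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.discovery import DiscoveryMgmtClient

client = DiscoveryMgmtClient(credential=DefaultAzureCredential(), subscription_id="<subscription-id>")

# The body may be a Workspace model, a JSON mapping, or IO[bytes].
poller = client.workspaces.begin_create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    resource={"location": "eastus", "properties": {}},  # assumed minimal create body
)
workspace = poller.result()

# A plain GET reads the resource back once provisioning has finished.
fetched = client.workspaces.get(resource_group_name="my-rg", workspace_name="my-workspace")
print(fetched["name"])
```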
The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Workspace, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Workspace].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + properties: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspaces_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Workspace, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Workspace or JSON or IO[bytes] + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Workspace, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Workspace].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: 
ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_workspaces_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Workspace"]: + """List Workspace resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
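The implementation above honors the standard `polling`, `polling_interval`, and `continuation_token` keyword arguments, so a delete can be resumed later from a saved token. A hedged sketch of that pattern (placeholder names; persistence of the token is left out):

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.discovery import DiscoveryMgmtClient

client = DiscoveryMgmtClient(credential=DefaultAzureCredential(), subscription_id="<subscription-id>")

poller = client.workspaces.begin_delete(resource_group_name="my-rg", workspace_name="my-workspace")
token = poller.continuation_token()  # opaque string that identifies the in-flight operation

# Later, or in another process: re-attach to the same LRO and wait for it.
resumed = client.workspaces.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    continuation_token=token,
)
resumed.result()
```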
+ :type resource_group_name: str + :return: An iterator like instance of Workspace + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Workspace]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspaces_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Workspace], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Workspace"]: + """List Workspace resources by subscription ID. 
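Both listing operations return pagers that can be iterated directly. An illustrative sketch with placeholder names, covering the resource-group-scoped listing defined above and the subscription-scoped listing described next:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.discovery import DiscoveryMgmtClient

client = DiscoveryMgmtClient(credential=DefaultAzureCredential(), subscription_id="<subscription-id>")

# Scope the listing to one resource group...
for ws in client.workspaces.list_by_resource_group(resource_group_name="my-rg"):
    print("rg-scoped:", ws["name"])

# ...or enumerate every workspace visible to the subscription.
for ws in client.workspaces.list_by_subscription():
    print("sub-scoped:", ws["id"])
```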
+ + :return: An iterator like instance of Workspace + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Workspace]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspaces_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Workspace], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class WorkspacePrivateEndpointConnectionsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`workspace_private_endpoint_connections` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.WorkspacePrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: WorkspacePrivateEndpointConnection. The WorkspacePrivateEndpointConnection is + compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.WorkspacePrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_workspace_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.WorkspacePrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: 
Union[_models.WorkspacePrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspace_private_endpoint_connections_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: _models.WorkspacePrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. 
+ :type resource: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns WorkspacePrivateEndpointConnection. 
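A hypothetical approval flow for a pending private endpoint connection. The `privateLinkServiceConnectionState` payload shape is an assumption based on the common ARM private-link contract rather than on this package's models, and all resource names are placeholders.

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.discovery import DiscoveryMgmtClient

client = DiscoveryMgmtClient(credential=DefaultAzureCredential(), subscription_id="<subscription-id>")

# Inspect the pending connection first.
pec = client.workspace_private_endpoint_connections.get(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    private_endpoint_connection_name="my-connection",
)
print(pec["name"])

# Approve it by sending the assumed private-link state shape back through create_or_update.
poller = client.workspace_private_endpoint_connections.begin_create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    private_endpoint_connection_name="my-connection",
    resource={
        "properties": {
            "privateLinkServiceConnectionState": {
                "status": "Approved",
                "description": "Approved by admin",
            }
        }
    },
)
approved = poller.result()
```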
The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.WorkspacePrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Is one of the following types: + WorkspacePrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection or JSON or + IO[bytes] + :return: An instance of LROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.WorkspacePrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.WorkspacePrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.WorkspacePrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.WorkspacePrivateEndpointConnection]( + 
self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_workspace_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Deletes the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.WorkspacePrivateEndpointConnection"]: + """Lists all private endpoint connections for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
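A brief sketch of enumerating the private endpoint connections of a workspace via the pager described here (placeholder names; not part of the generated module):

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.discovery import DiscoveryMgmtClient

client = DiscoveryMgmtClient(credential=DefaultAzureCredential(), subscription_id="<subscription-id>")

# Each yielded item is a WorkspacePrivateEndpointConnection (MutableMapping-compatible).
for connection in client.workspace_private_endpoint_connections.list_by_workspace(
    resource_group_name="my-rg", workspace_name="my-workspace"
):
    print(connection["name"])
```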
+ :type workspace_name: str + :return: An iterator like instance of WorkspacePrivateEndpointConnection + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.WorkspacePrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspace_private_endpoint_connections_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.WorkspacePrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class ChatModelDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`chat_model_deployments` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> _models.ChatModelDeployment: + """Get a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :return: ChatModelDeployment. The ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.ChatModelDeployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + + _request = build_chat_model_deployments_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, 
+ } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_chat_model_deployments_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: _models.ChatModelDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.ChatModelDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns ChatModelDeployment. 
The ChatModelDeployment is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Is one of the following types: + ChatModelDeployment, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.ChatModelDeployment or JSON or IO[bytes] + :return: An instance of LROPoller that returns ChatModelDeployment. 
The ChatModelDeployment is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.ChatModelDeployment].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.ChatModelDeployment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_chat_model_deployments_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: _models.ChatModelDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.ChatModelDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns ChatModelDeployment. 
The ChatModelDeployment is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Is one of the following types: + ChatModelDeployment, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.ChatModelDeployment or JSON or IO[bytes] + :return: An instance of LROPoller that returns ChatModelDeployment. 
The ChatModelDeployment is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.ChatModelDeployment].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.ChatModelDeployment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_chat_model_deployments_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # 
Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.ChatModelDeployment"]: + """List ChatModelDeployment resources by Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of ChatModelDeployment + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.ChatModelDeployment]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_chat_model_deployments_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.ChatModelDeployment], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class WorkspacePrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`workspace_private_link_resources` attribute. 
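+
+ Example (an illustrative usage sketch only; the resource group and workspace
+ names below are placeholders, and ``client`` is assumed to be an authenticated
+ :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`)::
+
+ # Enumerate every private link resource exposed by the workspace.
+ for link_resource in client.workspace_private_link_resources.list_by_workspace(
+ resource_group_name="<resource-group>",
+ workspace_name="<workspace>",
+ ):
+ print(link_resource)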
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.WorkspacePrivateLinkResource: + """Gets the specified private link resource for the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_link_resource_name: The name of the private link associated with the Azure + resource. Required. + :type private_link_resource_name: str + :return: WorkspacePrivateLinkResource. The WorkspacePrivateLinkResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.discovery.models.WorkspacePrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.WorkspacePrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_workspace_private_link_resources_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.WorkspacePrivateLinkResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.WorkspacePrivateLinkResource"]: + """Lists all private link resources for the workspace. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An iterator like instance of WorkspacePrivateLinkResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.WorkspacePrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.WorkspacePrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspace_private_link_resources_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.WorkspacePrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class NodePoolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`node_pools` attribute. 
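+
+ Example (an illustrative usage sketch only; the resource group, supercomputer
+ and node pool names below are placeholders, and ``client`` is assumed to be an
+ authenticated :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`)::
+
+ # Start deleting a NodePool and block until the long-running operation completes.
+ client.node_pools.begin_delete(
+ resource_group_name="<resource-group>",
+ supercomputer_name="<supercomputer>",
+ node_pool_name="<node-pool>",
+ ).result()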
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> _models.NodePool: + """Get a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :return: NodePool. The NodePool is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.NodePool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + + _request = build_node_pools_get_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.NodePool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_node_pools_create_or_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: _models.NodePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.NodePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Is one of the following types: NodePool, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.NodePool or JSON or IO[bytes] + :return: An instance of LROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.NodePool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.NodePool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.NodePool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_node_pools_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: _models.NodePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.NodePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Is one of the following types: + NodePool, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.NodePool or JSON or IO[bytes] + :return: An instance of LROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.NodePool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.NodePool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.NodePool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_node_pools_delete_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_supercomputer( + self, resource_group_name: str, supercomputer_name: str, **kwargs: Any + ) -> ItemPaged["_models.NodePool"]: + """List NodePool resources by Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. 
+ :type supercomputer_name: str + :return: An iterator like instance of NodePool + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.NodePool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_node_pools_list_by_supercomputer_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.NodePool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class SupercomputersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`supercomputers` attribute. 
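+
+        A minimal usage sketch (hypothetical names in angle brackets are placeholders;
+        assumes ``DefaultAzureCredential`` can resolve credentials from the environment):
+
+        .. code-block:: python
+
+            from azure.identity import DefaultAzureCredential
+            from azure.mgmt.discovery import DiscoveryMgmtClient
+
+            client = DiscoveryMgmtClient(
+                credential=DefaultAzureCredential(), subscription_id="<subscription-id>"
+            )
+            # Enumerate Supercomputers in the subscription, then delete one and wait
+            # for the long-running operation to complete.
+            for sc in client.supercomputers.list_by_subscription():
+                print(sc.name)
+            client.supercomputers.begin_delete("<resource-group>", "<supercomputer-name>").result()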
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, supercomputer_name: str, **kwargs: Any) -> _models.Supercomputer: + """Get a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :return: Supercomputer. The Supercomputer is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Supercomputer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + + _request = build_supercomputers_get_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Supercomputer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + resource: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: 
ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_supercomputers_create_or_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: _models.Supercomputer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Supercomputer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. 
+ :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Is one of the following types: Supercomputer, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Supercomputer or JSON or IO[bytes] + :return: An instance of LROPoller that returns Supercomputer. 
The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Supercomputer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Supercomputer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Supercomputer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + properties: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_supercomputers_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = 
kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: _models.Supercomputer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Supercomputer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Supercomputer, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Supercomputer or JSON or IO[bytes] + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Supercomputer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Supercomputer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Supercomputer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, 
resource_group_name: str, supercomputer_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_supercomputers_delete_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, supercomputer_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. 
+ :type supercomputer_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Supercomputer"]: + """List Supercomputer resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of Supercomputer + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Supercomputer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_supercomputers_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Supercomputer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Supercomputer"]: + """List Supercomputer resources by subscription ID. 
+ + :return: An iterator like instance of Supercomputer + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Supercomputer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_supercomputers_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Supercomputer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class StorageAssetsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`storage_assets` attribute. 
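+
+        A minimal usage sketch (hypothetical names in angle brackets are placeholders;
+        client construction follows the package README):
+
+        .. code-block:: python
+
+            from azure.identity import DefaultAzureCredential
+            from azure.mgmt.discovery import DiscoveryMgmtClient
+
+            client = DiscoveryMgmtClient(
+                credential=DefaultAzureCredential(), subscription_id="<subscription-id>"
+            )
+            # Page through the StorageAssets under a StorageContainer, then fetch one by name.
+            for asset in client.storage_assets.list_by_storage_container(
+                "<resource-group>", "<storage-container-name>"
+            ):
+                print(asset.name)
+            asset = client.storage_assets.get(
+                "<resource-group>", "<storage-container-name>", "<storage-asset-name>"
+            )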
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> _models.StorageAsset: + """Get a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :return: StorageAsset. The StorageAsset is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.StorageAsset + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + + _request = build_storage_assets_get_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAsset, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + 
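+        # error_map now combines the default ARM status-code-to-exception mapping with any
+        # "error_map" dict the caller passed through **kwargs.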
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_assets_create_or_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: _models.StorageAsset, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.StorageAsset + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. 
The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Is one of the following types: StorageAsset, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.StorageAsset or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageAsset. 
The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAsset, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageAsset].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.StorageAsset]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_assets_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", 
skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: _models.StorageAsset, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.StorageAsset + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. 
The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Is one of the following types: + StorageAsset, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.StorageAsset or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageAsset. 
The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAsset, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageAsset].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.StorageAsset]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_assets_delete_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + 
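+                # StreamConsumedError / StreamClosedError are expected when the error body
+                # was already consumed or the socket already closed, so they are ignored below.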
except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_storage_container( + self, resource_group_name: str, storage_container_name: str, **kwargs: Any + ) -> ItemPaged["_models.StorageAsset"]: + """List StorageAsset resources by StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. 
+ :type storage_container_name: str + :return: An iterator like instance of StorageAsset + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageAsset]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_assets_list_by_storage_container_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageAsset], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class StorageContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s + :attr:`storage_containers` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, storage_container_name: str, **kwargs: Any) -> _models.StorageContainer: + """Get a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :return: StorageContainer. The StorageContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.StorageContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + + _request = build_storage_containers_get_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + storage_container_name: str, + resource: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_containers_create_or_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: _models.StorageContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.StorageContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Is one of the following types: StorageContainer, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.StorageContainer or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageContainer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageContainer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.StorageContainer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + storage_container_name: str, + properties: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_containers_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: _models.StorageContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.StorageContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Is one of the following types: + StorageContainer, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.StorageContainer or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageContainer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageContainer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.StorageContainer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, storage_container_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_containers_delete_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, storage_container_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.StorageContainer"]: + """List StorageContainer resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of StorageContainer + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageContainer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_containers_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageContainer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.StorageContainer"]: + """List StorageContainer resources by subscription ID. 
+ + :return: An iterator like instance of StorageContainer + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageContainer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_containers_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageContainer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_patch.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/py.typed b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/dev_requirements.txt b/sdk/discovery/azure-mgmt-discovery/dev_requirements.txt new file mode 100644 index 000000000000..ece056fe0984 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/dev_requirements.txt @@ -0,0 +1,5 @@ +-e ../../../eng/tools/azure-sdk-tools +../../core/azure-core +../../identity/azure-identity +../../core/azure-mgmt-core +aiohttp \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..b1a9a95fd726 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_endpoint_connections.begin_create_or_update( + resource_group_name="rgdiscovery", + bookshelf_name="a65f3c23bf2baa5bd4", + private_endpoint_connection_name="connection", + resource={ + "properties": { + "privateEndpoint": {}, + "privateLinkServiceConnectionState": { + "actionsRequired": "vgqhrxvmviabfgmafqtbej", + "description": "lknyprq", + "status": "Pending", + }, + } + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateEndpointConnections_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py new file mode 100644 index 000000000000..bf1ca146fb27 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_endpoint_connections_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.bookshelf_private_endpoint_connections.begin_delete( + resource_group_name="rgdiscovery", + bookshelf_name="9988c91bf62635cea5", + private_endpoint_connection_name="connection", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateEndpointConnections_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py new file mode 100644 index 000000000000..38ef3adf1175 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_endpoint_connections_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_endpoint_connections.get( + resource_group_name="rgdiscovery", + bookshelf_name="ca2ea71fd0a5838c7f", + private_endpoint_connection_name="connection", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateEndpointConnections_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py new file mode 100644 index 000000000000..67eff6f5a4e1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_endpoint_connections.list_by_bookshelf( + resource_group_name="rgdiscovery", + bookshelf_name="d96263ffc8d8c904d4", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateEndpointConnections_ListByBookshelf_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py new file mode 100644 index 000000000000..a3ec3fd8f140 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_link_resources_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_link_resources.get( + resource_group_name="rgdiscovery", + bookshelf_name="9158657d63f4f9235f", + private_link_resource_name="connection", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateLinkResources_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py new file mode 100644 index 000000000000..0087d4a835e9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_link_resources.list_by_bookshelf( + resource_group_name="rgdiscovery", + bookshelf_name="cb4a7b7d5c4b6c3f78", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateLinkResources_ListByBookshelf_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..f1c14bda49cf --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py @@ -0,0 +1,61 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.begin_create_or_update( + resource_group_name="rgdiscovery", + bookshelf_name="21b8f5a6a47fa1fdcc", + resource={ + "location": "uksouth", + "properties": { + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "identityClientId": "00000011-1111-2222-2222-123456789111", + "keyName": "tjjzitmclgtahulm", + "keyVaultUri": "https://microsoft.com/a", + "keyVersion": "dnoogjozeqlpubvkxwrujbncstsm", + }, + "logAnalyticsClusterId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.OperationalInsights/clusters/cluster1", + "privateEndpointSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/privateEndpointSubnet1", + "publicNetworkAccess": "Enabled", + "searchSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/searchSubnet1", + "workloadIdentities": {"key8334": {}}, + }, + "tags": {"key782": "hmvugqbu"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py new file mode 100644 index 000000000000..b8074abb2c16 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.bookshelves.begin_delete( + resource_group_name="rgdiscovery", + bookshelf_name="cdaa070c4d0ea7b9c9", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py new file mode 100644 index 000000000000..993bc6900627 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.get( + resource_group_name="rgdiscovery", + bookshelf_name="85c2fc6e437c0b608b", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..e60dc746e7a0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..5a4512bc50fb --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py new file mode 100644 index 000000000000..fbbcb56f737c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.begin_update( + resource_group_name="rgdiscovery", + bookshelf_name="c6189a7b33260c4a72", + properties={ + "properties": { + "keyVaultProperties": {"keyName": "b", "keyVersion": "kyf"}, + "publicNetworkAccess": "Enabled", + }, + "tags": {"key1792": "dnybouectwzjb"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..abb5d6ac4ffb --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.chat_model_deployments.begin_create_or_update( + resource_group_name="rgdiscovery", + workspace_name="eb2204766409e111d9", + chat_model_deployment_name="d1844ae17cc93bd299", + resource={ + "location": "uksouth", + "properties": {"modelFormat": "tcttsgevrsuflt", "modelName": "nvwdoluhukiachlyrdnpxusxsc"}, + "tags": {"key4822": "fpesmhjievwzxmhxszcgpztivcgw"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py new file mode 100644 index 000000000000..cb9d3096e471 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.chat_model_deployments.begin_delete( + resource_group_name="rgdiscovery", + workspace_name="015403a79d07536250", + chat_model_deployment_name="7a1ee53e20d918a13d", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py new file mode 100644 index 000000000000..6c7d81fc9de5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.chat_model_deployments.get( + resource_group_name="rgdiscovery", + workspace_name="715bd6631a63225578", + chat_model_deployment_name="7938c93c6f61d31f7e", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py new file mode 100644 index 000000000000..f8ccc2c039a0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_list_by_workspace_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.chat_model_deployments.list_by_workspace( + resource_group_name="rgdiscovery", + workspace_name="715794cf970dc53142", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_ListByWorkspace_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py new file mode 100644 index 000000000000..d8220d7d1385 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.chat_model_deployments.begin_update( + resource_group_name="rgdiscovery", + workspace_name="438970fd7f0137032c", + chat_model_deployment_name="fd0837f1d866060b11", + properties={"properties": {}, "tags": {"key6223": "tvufnjfnrdadechkcyoboyrcme"}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..57e5417847ff --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python node_pools_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.node_pools.begin_create_or_update( + resource_group_name="rgdiscovery", + supercomputer_name="8074da5c77f95509a8", + node_pool_name="5a88c24ec4e7091650", + resource={ + "location": "uksouth", + "properties": { + "maxNodeCount": 4, + "minNodeCount": 0, + "scaleSetPriority": "Regular", + "subnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/subnet1", + "vmSize": "Standard_NC24ads_A100_v4", + }, + "tags": {"key6074": "qlnvwgazrqmwauqqvxntjtoye"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/NodePools_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py new file mode 100644 index 000000000000..18ff0c542244 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python node_pools_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.node_pools.begin_delete( + resource_group_name="rgdiscovery", + supercomputer_name="6ddaf20b09c36fc7ef", + node_pool_name="6dcea29fcbc2279a3b", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/NodePools_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_get_maximum_set_gen.py new file mode 100644 index 000000000000..cc4eea559b84 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python node_pools_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.node_pools.get( + resource_group_name="rgdiscovery", + supercomputer_name="3d4fce3989a31db9c7", + node_pool_name="80084da43e5c8bc50e", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/NodePools_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_list_by_supercomputer_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_list_by_supercomputer_maximum_set_gen.py new file mode 100644 index 000000000000..d0c7b340187c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_list_by_supercomputer_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python node_pools_list_by_supercomputer_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.node_pools.list_by_supercomputer( + resource_group_name="rgdiscovery", + supercomputer_name="7cc28f3db7c8fa0087", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/NodePools_ListBySupercomputer_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_update_maximum_set_gen.py new file mode 100644 index 000000000000..a809a66cf67a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_update_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python node_pools_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.node_pools.begin_update( + resource_group_name="rgdiscovery", + supercomputer_name="f674a0697b0c54044e", + node_pool_name="12ceb04d31658f1ec7", + properties={"properties": {"maxNodeCount": 21, "minNodeCount": 0}, "tags": {"key5366": "uyhhzfhedjkqanudogu"}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/NodePools_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_maximum_set_gen.py new file mode 100644 index 000000000000..5cac15277941 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python operations_list_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.operations.list() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Operations_List_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_minimum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_minimum_set_gen.py new file mode 100644 index 000000000000..e01c97ebd310 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_minimum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python operations_list_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.operations.list() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Operations_List_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..b63293a71157 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_create_or_update_maximum_set_gen.py @@ -0,0 +1,56 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python projects_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.projects.begin_create_or_update( + resource_group_name="rgdiscovery", + workspace_name="1cf6d2a754acbb9655", + project_name="55cdd016d5653ebf82", + resource={ + "location": "uksouth", + "properties": { + "settings": { + "behaviorPreferences": "zjhzrtkwdmwbueseguplzpxeqvbqrknwvxxgtwtpldnguihcbprdwsihucbrmhyvmxuvldlgtrheqehrpcmdqtjknlyjnzdvphjicifbuvlsjgoaiaeunshmuupogxhwywntzhdvrdkknumgyofeltjqyenfiemerqsafaphhzgkwqrnuhbxklclishnnailctvcdjzvfroakitqkmllziocaolmyvytjmqhivljovriyicparifitswaynjsczcpfsgwyjsojiwqzauscpgmxqhznkofwydrjauiwkwkjrvclbufqmzyftfwjkymalhwkpiabljammjstpsknxqouabruobyznqnscucrvurarbbtefmaqiqfkyykuifojikmnkfgnjyagxwpjilfjyfpkqjdgrupitpqbvebmhsizeomzxqekqbsqqnlkefolbgbnfavtliixrvqxcbcxrxnpozucsvseddpjzsydiebyxxdehaniinfvfbflqwmyqelsjquigikebmfuuhdervonditfsummrsuokoqtessdmwptawejkqkkmtzgomamsbcbpviejbirvdwbcoenrsxeyayglvygsknetjuxdmbroritqklncrrnstwuaoohrqnypxfgbvfofsabnrgofobhdkktjyuhrxzmkrwglqczjlrfxbcbrplqmocjbphkjpdfxrfpkfrvlqqmiwftsuhxmjpimvngakbpkcvdevwubfqjjpnjmoiruabwxtzqehlwangolxjeqzjbfxltrgchuiginrgaeaztcqwacogzhvuhxcolvlzmoulikspebsdjyqlzgrtkqobkszfspnjftmnzsbyctxhkjsyemlehnqqhvvfdtrfarpjgaklmvbwukaykstipsnejnpnwaxskppilmgcdebupwpyyqajergevjizlhkiinvvqojrnegwbilhaktgjfkkefnwfbcxmjgylidekqjvgkxvnxdrttxzoyndupwqvlebbfgyiddgixrhcdbfkgakpausgcmjsgpqsjonrrrodgzzkworpvxzjzgtdbzqerqwjhjwoescduqwdwxsgmgyxolhlpdhuvscmzuuiynntspcyjlyngexpardjklyycnyquvbtfwlnqzxujowoljfvpnbyyoqiblxiiojiaeqpoblnrgttqmjqvqjaawevmbxgmbsaumjutbxhywkvhrljzcpitnunwfvtyiglqwlcngffnzdnfvccfqgdiazxoxgsepfqlyoxnofvogtsudsxarqpxhpidrwbykeypszgcbbyshljhlxperpdysagbmumexubjzimbetfayqfcbdkdpfsqrfuisioggdewxmeuihqitkwqfvmmwfppxfoigaoskgzedlhtmjagcbvwwnfkeyocceioccfjmxvtjutydhwapxkgdgjxuquujkdjtbkwhrlbxgclyqdcdnexitthelxajidteqtahvjwoovaripeuzycrdazmmzvpcecahnyentvdqdfteddmddcllromsumkucqhcgdelqsotimumwulnplxqeckhwgngafyvjlykdyotciabfkkdbqlfgwxkjreelzdswbaqzhiweqopubmxlestbvcrdimtiyleisfsvheanokuaipbniseceonfqlbrfofrhbuwtuirnrooflajdyrzwgcmpztgeyzrgvohijxrwrgsmfxagjuuygqtbyilneezxkmcbjoyhljlnesiuhgznvxzglpemwqfpixqlppehxeqzwhuxfcknkmpfttpywxaimzrsfalarhtoiwenlulyoadfdcyvsahzngcashxrchwrxknrvzhldforskqsktvltfuuxhummulwcfvezmfedfobzyfryrrzbypsvakkgppvhhicjydgzmnhcxsfqrdwgbmitmzzmjyzgvzusjigujfohqdlgmrtxmvvxoaxyfiyrgvfawkyynjykpmejzqmrpzitetxhqcjrqspglkzricplkrecaiumajvsobcddohclhmyinyteblkxqoqnoiyfwspencqszjbsnpyokusxgptshytgzqkynhncmqgrxaeugwyzxxqubfdkozfqrdeveopnhyvjcpjziogeyinlvcbhwltteivcnkfehvatnbvkfoqedljupaxholukhagwcasdgagdhpmkiumdclzstyexknhlojeoqkuejmnbuhdwgskgbqfomxvkgziun" + }, + "storageContainerIds": [ + "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/resourceGroups/rgdiscovery/providers/Microsoft.Discovery/storageContainers/storageContainer12" + ], + }, + "tags": {"key717": "tdn"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Projects_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_delete_maximum_set_gen.py new file mode 100644 index 000000000000..17d1115e8dcd --- /dev/null +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python projects_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.projects.begin_delete( + resource_group_name="rgdiscovery", + workspace_name="5020af62f469b308c0", + project_name="9ae1e783de71d4e949", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Projects_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_get_maximum_set_gen.py new file mode 100644 index 000000000000..4fb437af2aa9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python projects_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.projects.get( + resource_group_name="rgdiscovery", + workspace_name="aa8419d5add7095abd", + project_name="4884cf65356e9c3489", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Projects_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_list_by_workspace_maximum_set_gen.py new file mode 100644 index 000000000000..f04684406333 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_list_by_workspace_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python projects_list_by_workspace_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.projects.list_by_workspace( + resource_group_name="rgdiscovery", + workspace_name="56c2d23d65c9121656", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Projects_ListByWorkspace_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_update_maximum_set_gen.py new file mode 100644 index 000000000000..fe6b13aa1e12 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_update_maximum_set_gen.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python projects_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.projects.begin_update( + resource_group_name="rgdiscovery", + workspace_name="11e14e4bec2ea791b4", + project_name="1e7dd7aa730b25cabf", + properties={ + "properties": { + "settings": { + "behaviorPreferences": "zjhzrtkwdmwbueseguplzpxeqvbqrknwvxxgtwtpldnguihcbprdwsihucbrmhyvmxuvldlgtrheqehrpcmdqtjknlyjnzdvphjicifbuvlsjgoaiaeunshmuupogxhwywntzhdvrdkknumgyofeltjqyenfiemerqsafaphhzgkwqrnuhbxklclishnnailctvcdjzvfroakitqkmllziocaolmyvytjmqhivljovriyicparifitswaynjsczcpfsgwyjsojiwqzauscpgmxqhznkofwydrjauiwkwkjrvclbufqmzyftfwjkymalhwkpiabljammjstpsknxqouabruobyznqnscucrvurarbbtefmaqiqfkyykuifojikmnkfgnjyagxwpjilfjyfpkqjdgrupitpqbvebmhsizeomzxqekqbsqqnlkefolbgbnfavtliixrvqxcbcxrxnpozucsvseddpjzsydiebyxxdehaniinfvfbflqwmyqelsjquigikebmfuuhdervonditfsummrsuokoqtessdmwptawejkqkkmtzgomamsbcbpviejbirvdwbcoenrsxeyayglvygsknetjuxdmbroritqklncrrnstwuaoohrqnypxfgbvfofsabnrgofobhdkktjyuhrxzmkrwglqczjlrfxbcbrplqmocjbphkjpdfxrfpkfrvlqqmiwftsuhxmjpimvngakbpkcvdevwubfqjjpnjmoiruabwxtzqehlwangolxjeqzjbfxltrgchuiginrgaeaztcqwacogzhvuhxcolvlzmoulikspebsdjyqlzgrtkqobkszfspnjftmnzsbyctxhkjsyemlehnqqhvvfdtrfarpjgaklmvbwukaykstipsnejnpnwaxskppilmgcdebupwpyyqajergevjizlhkiinvvqojrnegwbilhaktgjfkkefnwfbcxmjgylidekqjvgkxvnxdrttxzoyndupwqvlebbfgyiddgixrhcdbfkgakpausgcmjsgpqsjonrrrodgzzkworpvxzjzgtdbzqerqwjhjwoescduqwdwxsgmgyxolhlpdhuvscmzuuiynntspcyjlyngexpardjklyycnyquvbtfwlnqzxujowoljfvpnbyyoqiblxiiojiaeqpoblnrgttqmjqvqjaawevmbxgmbsaumjutbxhywkvhrljzcpitnunwfvtyiglqwlcngffnzdnfvccfqgdiazxoxgsepfqlyoxnofvogtsudsxarqpxhpidrwbykeypszgcbbyshljhlxperpdysagbmumexubjzimbetfayqfcbdkdpfsqrfuisioggdewxmeuihqitkwqfvmmwfppxfoigaoskgzedlhtmjagcbvwwnfkeyocceioccfjmxvtjutydhwapxkgdgjxuquujkdjtbkwhrlbxgclyqdcdnexitthelxajidteqtahvjwoovaripeuzycrdazmmzvpcecahnyentvdqdfteddmddcllromsumkucqhcgdelqsotimumwulnplxqeckhwgngafyvjlykdyotciabfkkdbqlfgwxkjreelzdswbaqzhiweqopubmxlestbvcrdimtiyleisfsvheanokuaipbniseceonfqlbrfofrhbuwtuirnrooflajdyrzwgcmpztgeyzrgvohijxrwrgsmfxagjuuygqtbyilneezxkmcbjoyhljlnesiuhgznvxzglpemwqfpixqlppehxeqzwhuxfcknkmpfttpywxaimzrsfalarhtoiwenlulyoadfdcyvsahzngcashxrchwrxknrvzhldforskqsktvltfuuxhummulwcfvezmfedfobzyfryrrzbypsvakkgppvhhicjydgzmnhcxsfqrdwgbmitmzzmjyzgvzusjigujfohqdlgmrtxmvvxoaxyfiyrgvfawkyynjykpmejzqmrpzitetxhqcjrqspglkzricplkrecaiumajvsobcddohclhmyinyteblkxqoqnoiyfwspencqszjbsnpyokusxgptshytgzqkynhncmqgrxaeugwyzxxqubfdkozfqrdeveopnhyvjcpjziogeyinlvcbhwltteivcnkfehvatnbvkfoqedljupaxholukhagwcasdgagdhpmkiumdclzstyexknhlojeoqkuejmnbuhdwgskgbqfomxvkgziun" + }, + "storageContainerIds": [ + "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/resourceGroups/rgdiscovery/providers/Microsoft.Discovery/storageContainers/storageContainer12" + ], + }, + "tags": {"key2596": "kpfaeffbnzkz"}, + }, 
+ ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Projects_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..3578a49e801a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_create_or_update_maximum_set_gen.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_assets_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_assets.begin_create_or_update( + resource_group_name="rgdiscovery", + storage_container_name="106b8981ac9ca95890", + storage_asset_name="8fd30c31448f7b0f1a", + resource={ + "location": "uksouth", + "properties": {"description": "gwlk", "path": "qmvrklgqdif"}, + "tags": {"key5443": "dneh"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageAssets_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_delete_maximum_set_gen.py new file mode 100644 index 000000000000..fc5746458aa0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_assets_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.storage_assets.begin_delete( + resource_group_name="rgdiscovery", + storage_container_name="f7e7a03c675ccffe1a", + storage_asset_name="8f741ee4588dc823fc", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/StorageAssets_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_get_maximum_set_gen.py new file mode 100644 index 000000000000..87ad0438dda2 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_assets_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_assets.get( + resource_group_name="rgdiscovery", + storage_container_name="edde0a4a016d7d6b2b", + storage_asset_name="5ea4bb40f40e2ef5e2", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageAssets_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_list_by_storage_container_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_list_by_storage_container_maximum_set_gen.py new file mode 100644 index 000000000000..7fe7724aff96 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_list_by_storage_container_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_assets_list_by_storage_container_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_assets.list_by_storage_container( + resource_group_name="rgdiscovery", + storage_container_name="6b4fbcbb65873f18bf", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/StorageAssets_ListByStorageContainer_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_update_maximum_set_gen.py new file mode 100644 index 000000000000..5dabb58daa9a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_update_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_assets_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_assets.begin_update( + resource_group_name="rgdiscovery", + storage_container_name="d177d30241e3f8a27d", + storage_asset_name="6cd8920c03970ccdfe", + properties={"properties": {"description": "tljmqqr"}, "tags": {"key5822": "jicwkfdyoqvgpoy"}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageAssets_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..5fc99f68f909 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py @@ -0,0 +1,53 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.begin_create_or_update( + resource_group_name="rgdiscovery", + storage_container_name="23ae33a54872c83164", + resource={ + "location": "uksouth", + "properties": { + "storageStore": { + "kind": "AzureStorageBlob", + "storageAccountId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/storageaccount", + } + }, + "tags": {"key9976": "waghigmzxlvfqwribpxamwx"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py new file mode 100644 index 000000000000..d4c5a1a443e5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.storage_containers.begin_delete( + resource_group_name="rgdiscovery", + storage_container_name="861edbda8228e6d8c9", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py new file mode 100644 index 000000000000..83ae374821d9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.get( + resource_group_name="rgdiscovery", + storage_container_name="8f3eba3d81d78de900", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..f41f373dbad8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..65a16969c8f6 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py new file mode 100644 index 000000000000..1ab604fd2647 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.begin_update( + resource_group_name="rgdiscovery", + storage_container_name="5c26ac8738c893ec11", + properties={"properties": {}, "tags": {"key5909": "hdhfnp"}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..c8c562ce8946 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py @@ -0,0 +1,65 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.begin_create_or_update( + resource_group_name="rgdiscovery", + supercomputer_name="85fd61f68e7207bbd3", + resource={ + "location": "uksouth", + "properties": { + "customerManagedKeys": "Enabled", + "diskEncryptionSetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Compute/diskEncryptionSets/diskencryptionset1", + "identities": { + "clusterIdentity": { + "id": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/managedid1" + }, + "kubeletIdentity": { + "id": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/managedid1" + }, + "workloadIdentities": {"key1149": {}}, + }, + "logAnalyticsClusterId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.OperationalInsights/clusters/cluster1", + "managementSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/managementSubnet1", + "outboundType": "LoadBalancer", + "subnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/subnet1", + "systemSku": "Standard_D4s_v6", + }, + "tags": {"key5625": "spcjwrxnslfkiqbzdkhhbano"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py new file mode 100644 index 000000000000..81dcaba509e9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.supercomputers.begin_delete( + resource_group_name="rgdiscovery", + supercomputer_name="44f7621cf75873fb53", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py new file mode 100644 index 000000000000..09da375a6906 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.get( + resource_group_name="rgdiscovery", + supercomputer_name="b6807d2513b2fdb240", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..f894d0623aab --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..358201caef65 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py new file mode 100644 index 000000000000..3c82b1402f8a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.begin_update( + resource_group_name="rgdiscovery", + supercomputer_name="a60016ec51d9d8e35d", + properties={ + "properties": {"identities": {"workloadIdentities": {"key3032": {}}}}, + "tags": {"key9318": "xawwf"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..8c9061c1a398 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py @@ -0,0 +1,88 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.begin_create_or_update( + resource_group_name="rgdiscovery", + tool_name="b5d4239f788c20b58b", + resource={ + "location": "uksouth", + "properties": { + "definitionContent": { + "actions": [ + { + "command": "python3 submit_dft.py ", + "description": "Optimize geometry of 'xyz's from the input data asset. 
This is a prerequisite for all other discovery computations.", + "environment_variables": [ + {"name": "OUTPUT_DIRECTORY_PATH", "value": "{{ outputDataAssetId }}"} + ], + "input_schema": { + "properties": { + "basisSet": { + "description": "Basis set. Must be one of the supported basis sets (e.g., def2-svp, def2-tzvp).", + "type": "string", + }, + "inputDataAssetId": { + "description": "Identifier of the input data asset", + "type": "string", + }, + "outputDataAssetId": { + "description": "Identifier to use for the new output data asset which will be created.", + "type": "string", + }, + "xyzColumnName": { + "description": "Column containing xyz data within the input data table asset", + "type": "string", + }, + }, + "required": ["inputDataAssetId", "xyzColumnName"], + "type": "object", + }, + "name": "GeometryOptimization", + } + ], + "description": "Advanced DFT computational tools for molecular geometry optimization and property calculations", + "name": "discovery", + "tool_id": "discovery-m1", + }, + "environmentVariables": {"key5460": "xtjzjghbist"}, + "version": "sjepxewtq", + }, + "tags": {"key2611": "cgsblxvyzevbd"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Tools_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py new file mode 100644 index 000000000000..e38da9b4746d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.tools.begin_delete( + resource_group_name="rgdiscovery", + tool_name="d0e8e07484db1bb9a9", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Tools_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py new file mode 100644 index 000000000000..a6886303264b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.get( + resource_group_name="rgdiscovery", + tool_name="30ebfda6785888d26f", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Tools_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..93839c36735f --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Tools_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..517f435b70ba --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Tools_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py new file mode 100644 index 000000000000..49117ed70a14 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py @@ -0,0 +1,87 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.begin_update( + resource_group_name="rgdiscovery", + tool_name="f1972d0fc9531d424c", + properties={ + "properties": { + "definitionContent": { + "actions": [ + { + "command": "python3 submit_dft.py ", + "description": "Optimize geometry of 'xyz's from the input data asset. This is a prerequisite for all other discovery computations.", + "environment_variables": [ + {"name": "OUTPUT_DIRECTORY_PATH", "value": "{{ outputDataAssetId }}"} + ], + "input_schema": { + "properties": { + "basisSet": { + "description": "Basis set. Must be one of the supported basis sets (e.g., def2-svp, def2-tzvp).", + "type": "string", + }, + "inputDataAssetId": { + "description": "Identifier of the input data asset", + "type": "string", + }, + "outputDataAssetId": { + "description": "Identifier to use for the new output data asset which will be created.", + "type": "string", + }, + "xyzColumnName": { + "description": "Column containing xyz data within the input data table asset", + "type": "string", + }, + }, + "required": ["inputDataAssetId", "xyzColumnName"], + "type": "object", + }, + "name": "GeometryOptimization", + } + ], + "description": "Advanced DFT computational tools for molecular geometry optimization and property calculations", + "name": "discovery", + "tool_id": "discovery-m1", + }, + "environmentVariables": {"key3840": "snaxrtkryhwqw"}, + "version": "jittnzvso", + }, + "tags": {"key4187": "vbirsnehukndlpioqtsmqyoqhklg"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Tools_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..8acb692f15d8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_endpoint_connections.begin_create_or_update( + resource_group_name="rgdiscovery", + workspace_name="2b2ef1dfc273d99493", + private_endpoint_connection_name="connection", + resource={ + "properties": { + "privateEndpoint": {}, + "privateLinkServiceConnectionState": { + "actionsRequired": "vgqhrxvmviabfgmafqtbej", + "description": "lknyprq", + "status": "Pending", + }, + } + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateEndpointConnections_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py new file mode 100644 index 000000000000..5b8e2b7098e3 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_endpoint_connections_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.workspace_private_endpoint_connections.begin_delete( + resource_group_name="rgdiscovery", + workspace_name="1e2a3df721db9f3406", + private_endpoint_connection_name="connection", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateEndpointConnections_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py new file mode 100644 index 000000000000..33184a17ef9b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_endpoint_connections_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_endpoint_connections.get( + resource_group_name="rgdiscovery", + workspace_name="16e7096454e0394819", + private_endpoint_connection_name="connection", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateEndpointConnections_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py new file mode 100644 index 000000000000..41412607fb1f --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_endpoint_connections.list_by_workspace( + resource_group_name="rgdiscovery", + workspace_name="cc28db0ff1bebbe39b", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateEndpointConnections_ListByWorkspace_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py new file mode 100644 index 000000000000..6f8999ad1f72 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_link_resources_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_link_resources.get( + resource_group_name="rgdiscovery", + workspace_name="68b05b24fa2cc1a943", + private_link_resource_name="connection", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateLinkResources_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py new file mode 100644 index 000000000000..f95206178269 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_link_resources_list_by_workspace_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_link_resources.list_by_workspace( + resource_group_name="rgdiscovery", + workspace_name="3a737dc9780bdefdff", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateLinkResources_ListByWorkspace_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..53403dda4f7a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py @@ -0,0 +1,66 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.begin_create_or_update( + resource_group_name="rgdiscovery", + workspace_name="b8d58cd85996a6dea3", + resource={ + "location": "uksouth", + "properties": { + "agentSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/agentSubnet1", + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyName": "yfplarzdfwsut", + "keyVaultUri": "https://microsoft.com/a", + "keyVersion": "qlsjcf", + }, + "logAnalyticsClusterId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.OperationalInsights/clusters/cluster1", + "privateEndpointSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/privateEndpointSubnet1", + "publicNetworkAccess": "Enabled", + "supercomputerIds": [ + "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/resourceGroups/rgdiscovery/providers/Microsoft.Discovery/supercomputers/supercomputer12" + ], + "workspaceIdentity": { + "id": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/managedid1" + }, + "workspaceSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/workspaceSubnet1", + }, + "tags": {"key5364": "xiwdefebkbfffgqlzmqaeqsqeq"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py new file mode 100644 index 000000000000..c4e8bc28ce22 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.workspaces.begin_delete( + resource_group_name="rgdiscovery", + workspace_name="f1559ab1ef72a2eae5", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py new file mode 100644 index 000000000000..e8d134f88fc9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.get( + resource_group_name="rgdiscovery", + workspace_name="0e6a06e55e7efe8f07", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..d9ffa31bc738 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..2139ca564d99 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py new file mode 100644 index 000000000000..cc7a66edafd7 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py @@ -0,0 +1,56 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.begin_update( + resource_group_name="rgdiscovery", + workspace_name="43ac331aecf462b646", + properties={ + "properties": { + "keyVaultProperties": { + "keyName": "oxxinrlglrdihfqjrpkjc", + "keyVersion": "xbvilcphokrwachseulvwywaekfh", + }, + "publicNetworkAccess": "Enabled", + "supercomputerIds": [ + "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/resourceGroups/rgdiscovery/providers/Microsoft.Discovery/supercomputers/supercomputer12" + ], + }, + "tags": {"key6612": "ca"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py new file mode 100644 index 000000000000..94f7843fe01d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+    test_proxy,
+    add_general_regex_sanitizer,
+    add_body_key_sanitizer,
+    add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, please avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+    discoverymgmt_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000")
+    discoverymgmt_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+    discoverymgmt_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+    discoverymgmt_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=discoverymgmt_subscription_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=discoverymgmt_tenant_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=discoverymgmt_client_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=discoverymgmt_client_secret, value="00000000-0000-0000-0000-000000000000")
+
+    add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+    add_header_regex_sanitizer(key="Cookie", value="cookie;")
+    add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations.py
new file mode 100644
index 000000000000..cbbe2fbe4f14
--- /dev/null
+++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations.py
@@ -0,0 +1,89 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.discovery import DiscoveryMgmtClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDiscoveryMgmtBookshelfPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase):
+    def setup_method(self, method):
+        self.client = self.create_mgmt_client(DiscoveryMgmtClient)
+
+    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+    @recorded_by_proxy
+    def test_bookshelf_private_endpoint_connections_get(self, resource_group):
+        response = self.client.bookshelf_private_endpoint_connections.get(
+            resource_group_name=resource_group.name,
+            bookshelf_name="str",
+            private_endpoint_connection_name="str",
+        )
+
+        # please add some check logic here by yourself
+        # ...
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_endpoint_connections_begin_create_or_update(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_endpoint_connections_begin_delete(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_endpoint_connections_list_by_bookshelf(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.list_by_bookshelf( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations_async.py new file mode 100644 index 000000000000..66afc8eee4f9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations_async.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtBookshelfPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_endpoint_connections_get(self, resource_group): + response = await self.client.bookshelf_private_endpoint_connections.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_endpoint_connections_begin_create_or_update(self, resource_group): + response = await ( + await self.client.bookshelf_private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_endpoint_connections_begin_delete(self, resource_group): + response = await ( + await self.client.bookshelf_private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_endpoint_connections_list_by_bookshelf(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.list_by_bookshelf( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations.py new file mode 100644 index 000000000000..0d765dfbe829 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtBookshelfPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_link_resources_get(self, resource_group): + response = self.client.bookshelf_private_link_resources.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_link_resources_list_by_bookshelf(self, resource_group): + response = self.client.bookshelf_private_link_resources.list_by_bookshelf( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations_async.py new file mode 100644 index 000000000000..5d40f6fa1cf6 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations_async.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtBookshelfPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_link_resources_get(self, resource_group): + response = await self.client.bookshelf_private_link_resources.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_link_resources_list_by_bookshelf(self, resource_group): + response = self.client.bookshelf_private_link_resources.list_by_bookshelf( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations.py new file mode 100644 index 000000000000..b05a5853b1eb --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations.py @@ -0,0 +1,197 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtBookshelvesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_get(self, resource_group): + response = self.client.bookshelves.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_begin_create_or_update(self, resource_group): + response = self.client.bookshelves.begin_create_or_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "bookshelfUri": "str", + "customerManagedKeys": "str", + "keyVaultProperties": { + "identityClientId": "str", + "keyName": "str", + "keyVaultUri": "str", + "keyVersion": "str", + }, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "searchSubnetId": "str", + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_begin_update(self, resource_group): + response = self.client.bookshelves.begin_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "bookshelfUri": "str", + "customerManagedKeys": "str", + "keyVaultProperties": { + "identityClientId": "str", + "keyName": "str", + "keyVaultUri": "str", + "keyVersion": "str", + }, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "searchSubnetId": "str", + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_begin_delete(self, resource_group): + response = self.client.bookshelves.begin_delete( + resource_group_name=resource_group.name, + bookshelf_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_list_by_resource_group(self, resource_group): + response = self.client.bookshelves.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_list_by_subscription(self, resource_group): + response = self.client.bookshelves.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations_async.py new file mode 100644 index 000000000000..a8c04de65510 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations_async.py @@ -0,0 +1,204 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtBookshelvesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_get(self, resource_group): + response = await self.client.bookshelves.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_begin_create_or_update(self, resource_group): + response = await ( + await self.client.bookshelves.begin_create_or_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "bookshelfUri": "str", + "customerManagedKeys": "str", + "keyVaultProperties": { + "identityClientId": "str", + "keyName": "str", + "keyVaultUri": "str", + "keyVersion": "str", + }, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "searchSubnetId": "str", + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_begin_update(self, resource_group): + response = await ( + await self.client.bookshelves.begin_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "bookshelfUri": "str", + "customerManagedKeys": "str", + "keyVaultProperties": { + "identityClientId": "str", + "keyName": "str", + "keyVaultUri": "str", + "keyVersion": "str", + }, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "searchSubnetId": "str", + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_begin_delete(self, resource_group): + response = await ( + await self.client.bookshelves.begin_delete( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_list_by_resource_group(self, resource_group): + response = self.client.bookshelves.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_list_by_subscription(self, resource_group): + response = self.client.bookshelves.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations.py new file mode 100644 index 000000000000..3bd49073761c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtChatModelDeploymentsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_get(self, resource_group): + response = self.client.chat_model_deployments.get( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_begin_create_or_update(self, resource_group): + response = self.client.chat_model_deployments.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"modelFormat": "str", "modelName": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_begin_update(self, resource_group): + response = self.client.chat_model_deployments.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"modelFormat": "str", "modelName": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_begin_delete(self, resource_group): + response = self.client.chat_model_deployments.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_list_by_workspace(self, resource_group): + response = self.client.chat_model_deployments.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations_async.py new file mode 100644 index 000000000000..8d11142138ff --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations_async.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtChatModelDeploymentsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_get(self, resource_group): + response = await self.client.chat_model_deployments.get( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_begin_create_or_update(self, resource_group): + response = await ( + await self.client.chat_model_deployments.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"modelFormat": "str", "modelName": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_begin_update(self, resource_group): + response = await ( + await self.client.chat_model_deployments.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"modelFormat": "str", "modelName": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_begin_delete(self, resource_group): + response = await ( + await self.client.chat_model_deployments.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_list_by_workspace(self, resource_group): + response = self.client.chat_model_deployments.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations.py new file mode 100644 index 000000000000..b417724dbac5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtNodePoolsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_get(self, resource_group): + response = self.client.node_pools.get( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_begin_create_or_update(self, resource_group): + response = self.client.node_pools.begin_create_or_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "maxNodeCount": 0, + "subnetId": "str", + "vmSize": "str", + "minNodeCount": 0, + "provisioningState": "str", + "scaleSetPriority": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_begin_update(self, resource_group): + response = self.client.node_pools.begin_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "maxNodeCount": 0, + "subnetId": "str", + "vmSize": "str", + "minNodeCount": 0, + "provisioningState": "str", + "scaleSetPriority": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_begin_delete(self, resource_group): + response = self.client.node_pools.begin_delete( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_list_by_supercomputer(self, resource_group): + response = self.client.node_pools.list_by_supercomputer( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations_async.py new file mode 100644 index 000000000000..d641d1fa68d9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations_async.py @@ -0,0 +1,131 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtNodePoolsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_get(self, resource_group): + response = await self.client.node_pools.get( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_begin_create_or_update(self, resource_group): + response = await ( + await self.client.node_pools.begin_create_or_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "maxNodeCount": 0, + "subnetId": "str", + "vmSize": "str", + "minNodeCount": 0, + "provisioningState": "str", + "scaleSetPriority": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_begin_update(self, resource_group): + response = await ( + await self.client.node_pools.begin_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "maxNodeCount": 0, + "subnetId": "str", + "vmSize": "str", + "minNodeCount": 0, + "provisioningState": "str", + "scaleSetPriority": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_begin_delete(self, resource_group): + response = await ( + await self.client.node_pools.begin_delete( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_list_by_supercomputer(self, resource_group): + response = self.client.node_pools.list_by_supercomputer( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations.py new file mode 100644 index 000000000000..1123e4821f63 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_operations_list(self, resource_group): + response = self.client.operations.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations_async.py new file mode 100644 index 000000000000..54ee10145b6b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations_async.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_operations_list(self, resource_group): + response = self.client.operations.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations.py new file mode 100644 index 000000000000..db6c8817c96e --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtProjectsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_get(self, resource_group): + response = self.client.projects.get( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_begin_create_or_update(self, resource_group): + response = self.client.projects.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "foundryProjectEndpoint": "str", + "provisioningState": "str", + "settings": {"behaviorPreferences": "str"}, + "storageContainerIds": ["str"], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_begin_update(self, resource_group): + response = self.client.projects.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "foundryProjectEndpoint": "str", + "provisioningState": "str", + "settings": {"behaviorPreferences": "str"}, + "storageContainerIds": ["str"], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_begin_delete(self, resource_group): + response = self.client.projects.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_list_by_workspace(self, resource_group): + response = self.client.projects.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations_async.py new file mode 100644 index 000000000000..64f11baba1d5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations_async.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtProjectsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_get(self, resource_group): + response = await self.client.projects.get( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_begin_create_or_update(self, resource_group): + response = await ( + await self.client.projects.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "foundryProjectEndpoint": "str", + "provisioningState": "str", + "settings": {"behaviorPreferences": "str"}, + "storageContainerIds": ["str"], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_begin_update(self, resource_group): + response = await ( + await self.client.projects.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "foundryProjectEndpoint": "str", + "provisioningState": "str", + "settings": {"behaviorPreferences": "str"}, + "storageContainerIds": ["str"], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_begin_delete(self, resource_group): + response = await ( + await self.client.projects.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_list_by_workspace(self, resource_group): + response = self.client.projects.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations.py new file mode 100644 index 000000000000..e047031e9f08 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtStorageAssetsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_get(self, resource_group): + response = self.client.storage_assets.get( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_begin_create_or_update(self, resource_group): + response = self.client.storage_assets.begin_create_or_update( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"description": "str", "path": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_begin_update(self, resource_group): + response = self.client.storage_assets.begin_update( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"description": "str", "path": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_begin_delete(self, resource_group): + response = self.client.storage_assets.begin_delete( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_list_by_storage_container(self, resource_group): + response = self.client.storage_assets.list_by_storage_container( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
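The generated tests above call every operation with the placeholder value "str" and leave verification to the author ("please add some check logic here by yourself"). As a rough illustration of how one of these skeletons might be fleshed out before being un-skipped, here is a hedged sketch of a storage-asset create-then-get test; the asset and container names, the location, and the asserted fields are illustrative assumptions, not values required by the service.

```python
# Hypothetical, fleshed-out variant of the generated storage-asset tests above.
# Intended to live inside TestDiscoveryMgmtStorageAssetsOperations, so it reuses
# self.client, AZURE_LOCATION, and the preparer/proxy decorators already imported there.
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
def test_storage_assets_create_then_get(self, resource_group):
    container_name = "sdk-test-container"  # assumed pre-existing storage container
    asset_name = "sdk-test-asset"          # assumed asset name

    created = self.client.storage_assets.begin_create_or_update(
        resource_group_name=resource_group.name,
        storage_container_name=container_name,
        storage_asset_name=asset_name,
        resource={
            "location": AZURE_LOCATION,
            "properties": {"description": "sample asset", "path": "data/sample"},
        },
    ).result()  # poll the long-running operation until the final resource is returned

    fetched = self.client.storage_assets.get(
        resource_group_name=resource_group.name,
        storage_container_name=container_name,
        storage_asset_name=asset_name,
    )

    # Basic checks in place of the generated "add some check logic" placeholder.
    assert fetched.name == asset_name
    assert created.name == fetched.name
```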
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations_async.py new file mode 100644 index 000000000000..382cdd9eae09 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations_async.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtStorageAssetsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_get(self, resource_group): + response = await self.client.storage_assets.get( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_begin_create_or_update(self, resource_group): + response = await ( + await self.client.storage_assets.begin_create_or_update( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"description": "str", "path": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_begin_update(self, resource_group): + response = await ( + await self.client.storage_assets.begin_update( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"description": "str", "path": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_begin_delete(self, resource_group): + response = await ( + await self.client.storage_assets.begin_delete( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_list_by_storage_container(self, resource_group): + response = self.client.storage_assets.list_by_storage_container( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations.py new file mode 100644 index 000000000000..0428af77d1a1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtStorageContainersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_get(self, resource_group): + response = self.client.storage_containers.get( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_begin_create_or_update(self, resource_group): + response = self.client.storage_containers.begin_create_or_update( + resource_group_name=resource_group.name, + storage_container_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"storageStore": "storage_store", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_begin_update(self, resource_group): + response = self.client.storage_containers.begin_update( + resource_group_name=resource_group.name, + storage_container_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"storageStore": "storage_store", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_begin_delete(self, resource_group): + response = self.client.storage_containers.begin_delete( + resource_group_name=resource_group.name, + storage_container_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_list_by_resource_group(self, resource_group): + response = self.client.storage_containers.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_list_by_subscription(self, resource_group): + response = self.client.storage_containers.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations_async.py new file mode 100644 index 000000000000..a5e8e22dcf33 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations_async.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtStorageContainersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_get(self, resource_group): + response = await self.client.storage_containers.get( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_begin_create_or_update(self, resource_group): + response = await ( + await self.client.storage_containers.begin_create_or_update( + resource_group_name=resource_group.name, + storage_container_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"storageStore": "storage_store", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_begin_update(self, resource_group): + response = await ( + await self.client.storage_containers.begin_update( + resource_group_name=resource_group.name, + storage_container_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"storageStore": "storage_store", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_begin_delete(self, resource_group): + response = await ( + await self.client.storage_containers.begin_delete( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_list_by_resource_group(self, resource_group): + response = self.client.storage_containers.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_list_by_subscription(self, resource_group): + response = self.client.storage_containers.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations.py new file mode 100644 index 000000000000..cc16b5e02832 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations.py @@ -0,0 +1,145 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtSupercomputersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_get(self, resource_group): + response = self.client.supercomputers.get( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_begin_create_or_update(self, resource_group): + response = self.client.supercomputers.begin_create_or_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "identities": { + "clusterIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "kubeletIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "subnetId": "str", + "customerManagedKeys": "str", + "diskEncryptionSetId": "str", + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "managementSubnetId": "str", + "outboundType": "str", + "provisioningState": "str", + "systemSku": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_begin_update(self, resource_group): + response = self.client.supercomputers.begin_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "identities": { + "clusterIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "kubeletIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "subnetId": "str", + "customerManagedKeys": "str", + "diskEncryptionSetId": "str", + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "managementSubnetId": "str", + "outboundType": "str", + "provisioningState": "str", + "systemSku": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_begin_delete(self, resource_group): + response = self.client.supercomputers.begin_delete( + resource_group_name=resource_group.name, + supercomputer_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_list_by_resource_group(self, resource_group): + response = self.client.supercomputers.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_list_by_subscription(self, resource_group): + response = self.client.supercomputers.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations_async.py new file mode 100644 index 000000000000..5a918c3a4769 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations_async.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtSupercomputersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_get(self, resource_group): + response = await self.client.supercomputers.get( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_begin_create_or_update(self, resource_group): + response = await ( + await self.client.supercomputers.begin_create_or_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "identities": { + "clusterIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "kubeletIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "subnetId": "str", + "customerManagedKeys": "str", + "diskEncryptionSetId": "str", + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "managementSubnetId": "str", + "outboundType": "str", + "provisioningState": "str", + "systemSku": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_begin_update(self, resource_group): + response = await ( + await self.client.supercomputers.begin_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "identities": { + "clusterIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "kubeletIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "subnetId": "str", + "customerManagedKeys": "str", + "diskEncryptionSetId": "str", + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "managementSubnetId": "str", + "outboundType": "str", + "provisioningState": "str", + "systemSku": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_begin_delete(self, resource_group): + response = await ( + await self.client.supercomputers.begin_delete( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_list_by_resource_group(self, resource_group): + response = self.client.supercomputers.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_list_by_subscription(self, resource_group): + response = self.client.supercomputers.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations.py new file mode 100644 index 000000000000..9ffadde34092 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations.py @@ -0,0 +1,123 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtToolsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_get(self, resource_group): + response = self.client.tools.get( + resource_group_name=resource_group.name, + tool_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_begin_create_or_update(self, resource_group): + response = self.client.tools.begin_create_or_update( + resource_group_name=resource_group.name, + tool_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "definitionContent": {"str": {}}, + "version": "str", + "environmentVariables": {"str": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_begin_update(self, resource_group): + response = self.client.tools.begin_update( + resource_group_name=resource_group.name, + tool_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "definitionContent": {"str": {}}, + "version": "str", + "environmentVariables": {"str": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_begin_delete(self, resource_group): + response = self.client.tools.begin_delete( + resource_group_name=resource_group.name, + tool_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_list_by_resource_group(self, resource_group): + response = self.client.tools.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_list_by_subscription(self, resource_group): + response = self.client.tools.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations_async.py new file mode 100644 index 000000000000..d07f79805da9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations_async.py @@ -0,0 +1,130 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtToolsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_get(self, resource_group): + response = await self.client.tools.get( + resource_group_name=resource_group.name, + tool_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_begin_create_or_update(self, resource_group): + response = await ( + await self.client.tools.begin_create_or_update( + resource_group_name=resource_group.name, + tool_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "definitionContent": {"str": {}}, + "version": "str", + "environmentVariables": {"str": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_begin_update(self, resource_group): + response = await ( + await self.client.tools.begin_update( + resource_group_name=resource_group.name, + tool_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "definitionContent": {"str": {}}, + "version": "str", + "environmentVariables": {"str": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_begin_delete(self, resource_group): + response = await ( + await self.client.tools.begin_delete( + resource_group_name=resource_group.name, + tool_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_list_by_resource_group(self, resource_group): + response = self.client.tools.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_list_by_subscription(self, resource_group): + response = self.client.tools.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..1c14d49687f2 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations.py @@ -0,0 +1,89 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtWorkspacePrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_endpoint_connections_get(self, resource_group): + response = self.client.workspace_private_endpoint_connections.get( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_endpoint_connections_begin_create_or_update(self, resource_group): + response = self.client.workspace_private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_endpoint_connections_begin_delete(self, resource_group): + response = self.client.workspace_private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_endpoint_connections_list_by_workspace(self, resource_group): + response = self.client.workspace_private_endpoint_connections.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations_async.py new file mode 100644 index 000000000000..ca43727b70aa --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations_async.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtWorkspacePrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_endpoint_connections_get(self, resource_group): + response = await self.client.workspace_private_endpoint_connections.get( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_endpoint_connections_begin_create_or_update(self, resource_group): + response = await ( + await self.client.workspace_private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_endpoint_connections_begin_delete(self, resource_group): + response = await ( + await self.client.workspace_private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_endpoint_connections_list_by_workspace(self, resource_group): + response = self.client.workspace_private_endpoint_connections.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
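One pattern worth noting in these generated tests: the list_* operations are never awaited or called with .result(). They return a pager that is consumed by iteration, a plain list comprehension in the sync tests and `async for` in the async ones. A minimal sketch of that pattern outside a test class, assuming an already-constructed DiscoveryMgmtClient and hypothetical resource names, looks like this:

```python
# Sketch of consuming the paged list operations shown in the generated tests.
# `client` is assumed to be an already-constructed DiscoveryMgmtClient (sync flavour);
# the resource group and workspace names are placeholders.
def list_workspace_private_endpoint_connections(client, resource_group_name: str, workspace_name: str):
    pager = client.workspace_private_endpoint_connections.list_by_workspace(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
    return [conn for conn in pager]  # iterating the pager fetches result pages lazily

# Async variant (as in the *_async tests): the call itself is still not awaited,
# only the iteration changes:  [conn async for conn in pager]
```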
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations.py new file mode 100644 index 000000000000..96e9b4b35cd6 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtWorkspacePrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_link_resources_get(self, resource_group): + response = self.client.workspace_private_link_resources.get( + resource_group_name=resource_group.name, + workspace_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_link_resources_list_by_workspace(self, resource_group): + response = self.client.workspace_private_link_resources.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations_async.py new file mode 100644 index 000000000000..4bc8d39d14f1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations_async.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtWorkspacePrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_link_resources_get(self, resource_group): + response = await self.client.workspace_private_link_resources.get( + resource_group_name=resource_group.name, + workspace_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_link_resources_list_by_workspace(self, resource_group): + response = self.client.workspace_private_link_resources.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations.py new file mode 100644 index 000000000000..0c6ef85250d0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations.py @@ -0,0 +1,193 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtWorkspacesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_get(self, resource_group): + response = self.client.workspaces.get( + resource_group_name=resource_group.name, + workspace_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_create_or_update(self, resource_group): + response = self.client.workspaces.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "workspaceIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "agentSubnetId": "str", + "customerManagedKeys": "str", + "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"}, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "supercomputerIds": ["str"], + "workspaceApiUri": "str", + "workspaceSubnetId": "str", + "workspaceUiUri": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_update(self, resource_group): + response = self.client.workspaces.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "workspaceIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "agentSubnetId": "str", + "customerManagedKeys": "str", + "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"}, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "supercomputerIds": ["str"], + "workspaceApiUri": "str", + "workspaceSubnetId": "str", + "workspaceUiUri": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_delete(self, resource_group): + response = self.client.workspaces.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_list_by_resource_group(self, resource_group): + response = self.client.workspaces.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_list_by_subscription(self, resource_group): + response = self.client.workspaces.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations_async.py new file mode 100644 index 000000000000..dbe007b808d0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations_async.py @@ -0,0 +1,200 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryMgmtWorkspacesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_get(self, resource_group): + response = await self.client.workspaces.get( + resource_group_name=resource_group.name, + workspace_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_create_or_update(self, resource_group): + response = await ( + await self.client.workspaces.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "workspaceIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "agentSubnetId": "str", + "customerManagedKeys": "str", + "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"}, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "supercomputerIds": ["str"], + "workspaceApiUri": "str", + "workspaceSubnetId": "str", + "workspaceUiUri": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_update(self, resource_group): + response = await ( + await self.client.workspaces.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "workspaceIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "agentSubnetId": "str", + "customerManagedKeys": "str", + "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"}, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "supercomputerIds": ["str"], + "workspaceApiUri": "str", + "workspaceSubnetId": "str", + "workspaceUiUri": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_delete(self, resource_group): + response = await ( + await self.client.workspaces.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_list_by_resource_group(self, resource_group): + response = self.client.workspaces.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_list_by_subscription(self, resource_group): + response = self.client.workspaces.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/pyproject.toml b/sdk/discovery/azure-mgmt-discovery/pyproject.toml new file mode 100644 index 000000000000..48002281ebf8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/pyproject.toml @@ -0,0 +1,86 @@ +[build-system] +requires = [ + "setuptools>=77.0.3", + "wheel", +] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-mgmt-discovery" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Azure Discovery Management Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = [ + "azure", + "azure sdk", +] +dependencies = [ + "isodate>=0.6.1", + "azure-mgmt-core>=1.6.0", + "typing-extensions>=4.6.0", +] +dynamic = [ + "version", + "readme", +] + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic.version] +attr = "azure.mgmt.discovery._version.VERSION" + +[tool.setuptools.dynamic.readme] +file = [ + "README.md", + "CHANGELOG.md", +] +content-type = "text/markdown" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + "azure.mgmt", +] + +[tool.setuptools.package-data] +pytyped = [ + "py.typed", +] + +[tool.azure-sdk-build] +breaking = false +pyright = false +mypy = false + +[packaging] +package_name = "azure-mgmt-discovery" +package_nspkg = "azure-mgmt-nspkg" +package_pprint_name = "Discovery Management" +package_doc_id = "" +is_stable = false +is_arm = true +need_msrestazure = false +need_azuremgmtcore = true +sample_link = "" +exclude_folders = "" +title = "DiscoveryMgmtClient" diff --git a/sdk/discovery/azure-mgmt-discovery/tests/__init__.py b/sdk/discovery/azure-mgmt-discovery/tests/__init__.py new file mode 100644 index 000000000000..b74cfa3b899c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/__init__.py @@ -0,0 +1,4 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ diff --git a/sdk/discovery/azure-mgmt-discovery/tests/conftest.py b/sdk/discovery/azure-mgmt-discovery/tests/conftest.py new file mode 100644 index 000000000000..21acae3a2965 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/conftest.py @@ -0,0 +1,34 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +""" +Pytest configuration for azure-mgmt-discovery tests. + +Management SDK tests run as live-only tests (no recordings needed). 
+""" +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# For security, please avoid recording sensitive identity information +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + """Add sanitizers to remove sensitive information.""" + subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=client_secret, value="00000000-0000-0000-0000-000000000000") + add_header_regex_sanitizer(key="Authorization", value="Sanitized") diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py b/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py new file mode 100644 index 000000000000..b9c39c855581 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py @@ -0,0 +1,73 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for Bookshelves operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase, AZURE_RESOURCE_GROUP + + +class TestBookshelves(DiscoveryMgmtTestCase): + """Tests for Bookshelves operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = AZURE_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_bookshelves_by_subscription(self): + """Test listing bookshelves in the subscription.""" + bookshelves = list(self.client.bookshelves.list_by_subscription()) + assert isinstance(bookshelves, list) + + @recorded_by_proxy + def test_list_bookshelves_by_resource_group(self): + """Test listing bookshelves in a resource group.""" + bookshelves = list(self.client.bookshelves.list_by_resource_group(self.resource_group)) + assert isinstance(bookshelves, list) + + @recorded_by_proxy + def test_get_bookshelf(self): + """Test getting a specific bookshelf by name.""" + bookshelf = self.client.bookshelves.get(self.resource_group, "test-bookshelf-05fbc43d") + assert bookshelf is not None + assert hasattr(bookshelf, "name") + assert hasattr(bookshelf, "location") + + @recorded_by_proxy + def test_create_bookshelf(self): + """Test creating a bookshelf.""" + bookshelf_data = {"location": "uksouth"} + operation = self.client.bookshelves.begin_create_or_update( + resource_group_name="olawal", + bookshelf_name="test-bookshelf-324938be", + resource=bookshelf_data, + ) + bookshelf = operation.result() + assert bookshelf is not None + + @recorded_by_proxy + def test_update_bookshelf(self): + """Test updating a bookshelf.""" + bookshelf_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.bookshelves.begin_update( + 
resource_group_name="olawal", + bookshelf_name="test-bookshelf-05fbc43d", + properties=bookshelf_data, + ) + updated_bookshelf = operation.result() + assert updated_bookshelf is not None + + @recorded_by_proxy + def test_delete_bookshelf(self): + """Test deleting a bookshelf.""" + operation = self.client.bookshelves.begin_delete( + resource_group_name="olawal", + bookshelf_name="test-bookshelf-9379e896", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py b/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py new file mode 100644 index 000000000000..0d726ed40d65 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py @@ -0,0 +1,63 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for ChatModelDeployments operations.""" +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +# Resource group that has a workspace +WORKSPACE_RESOURCE_GROUP = "olawal" +WORKSPACE_NAME = "test-wrksp-create01" + + +class TestChatModelDeployments(DiscoveryMgmtTestCase): + """Tests for ChatModelDeployments operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = WORKSPACE_RESOURCE_GROUP + self.workspace_name = WORKSPACE_NAME + + @recorded_by_proxy + def test_list_chat_model_deployments_by_workspace(self): + """Test listing chat model deployments in a workspace.""" + deployments = list( + self.client.chat_model_deployments.list_by_workspace(self.resource_group, self.workspace_name) + ) + assert isinstance(deployments, list) + + @recorded_by_proxy + def test_get_chat_model_deployment(self): + """Test getting a specific chat model deployment by name.""" + deployment = self.client.chat_model_deployments.get( + self.resource_group, self.workspace_name, "test-deploy-chatmodel01" + ) + assert deployment is not None + assert hasattr(deployment, "name") + + @recorded_by_proxy + def test_create_chat_model_deployment(self): + """Test creating a chat model deployment.""" + deployment_data = {"location": "uksouth", "properties": {"modelFormat": "OpenAI", "modelName": "gpt-4o"}} + operation = self.client.chat_model_deployments.begin_create_or_update( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + chat_model_deployment_name="test-deploy-chatmodel01", + resource=deployment_data, + ) + deployment = operation.result() + assert deployment is not None + + @recorded_by_proxy + def test_delete_chat_model_deployment(self): + """Test deleting a chat model deployment.""" + operation = self.client.chat_model_deployments.begin_delete( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + chat_model_deployment_name="test-deploy-chatmodel01", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py b/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py new file mode 100644 index 000000000000..afa6ba92a1ee --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py @@ -0,0 +1,91 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +"""Tests for NodePools operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + +# Resource group and supercomputer that contain node pools +NODE_POOL_RESOURCE_GROUP = "olawal" +NODE_POOL_SUPERCOMPUTER_NAME = "itsuperp114" + + +class TestNodePools(DiscoveryMgmtTestCase): + """Tests for NodePools operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = NODE_POOL_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_node_pools_by_supercomputer(self): + """Test listing node pools in a supercomputer.""" + node_pools = list(self.client.node_pools.list_by_supercomputer("rp114-rg", NODE_POOL_SUPERCOMPUTER_NAME)) + assert isinstance(node_pools, list) + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_node_pool(self): + """Test getting a specific node pool by name.""" + supercomputer_name = "test-supercomputer" + node_pool = self.client.node_pools.get(self.resource_group, supercomputer_name, "test-nodepool") + assert node_pool is not None + assert hasattr(node_pool, "name") + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_create_node_pool(self): + """Test creating a node pool.""" + supercomputer_name = "test-sc-2bbb25b8" + node_pool_data = { + "location": "uksouth", + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "vmSize": "Standard_D4s_v6", + "maxNodeCount": 3, + "minNodeCount": 1, + "scaleSetPriority": "Regular", + }, + } + operation = self.client.node_pools.begin_create_or_update( + resource_group_name="olawal", + supercomputer_name=supercomputer_name, + node_pool_name="test-np-568f7883", + resource=node_pool_data, + ) + node_pool = operation.result() + assert node_pool is not None + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_update_node_pool(self): + """Test updating a node pool.""" + supercomputer_name = "test-supercomputer" + node_pool_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.node_pools.begin_create_or_update( + resource_group_name=self.resource_group, + supercomputer_name=supercomputer_name, + node_pool_name="test-nodepool", + resource=node_pool_data, + ) + updated_node_pool = operation.result() + assert updated_node_pool is not None + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_delete_node_pool(self): + """Test deleting a node pool.""" + supercomputer_name = "test-supercomputer" + operation = self.client.node_pools.begin_delete( + resource_group_name=self.resource_group, + supercomputer_name=supercomputer_name, + node_pool_name="nodepool-to-delete", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py b/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py new file mode 100644 index 000000000000..4f03ed81416e --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py @@ -0,0 +1,24 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +"""Tests for Operations operations.""" +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +class TestOperations(DiscoveryMgmtTestCase): + """Tests for Operations operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + + @recorded_by_proxy + def test_list_operations(self): + """Test listing available API operations.""" + operations = list(self.client.operations.list()) + assert len(operations) > 0 + assert hasattr(operations[0], "name") diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py b/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py new file mode 100644 index 000000000000..04c3c579c390 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py @@ -0,0 +1,166 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for Private Endpoint related operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +# Resource group and resources for testing +WORKSPACE_RESOURCE_GROUP = "olawal" +WORKSPACE_NAME = "wrksptest44" +BOOKSHELF_NAME = "test-bookshelf" + + +class TestPrivateEndpoints(DiscoveryMgmtTestCase): + """Tests for Private Endpoint related operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = WORKSPACE_RESOURCE_GROUP + self.workspace_name = WORKSPACE_NAME + self.bookshelf_name = BOOKSHELF_NAME + + # ============ Workspace Private Endpoint Connection Tests ============ + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_list_workspace_private_endpoint_connections(self): + """Test listing workspace private endpoint connections.""" + connections = list( + self.client.workspace_private_endpoint_connections.list_by_workspace( + self.resource_group, self.workspace_name + ) + ) + assert isinstance(connections, list) + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_workspace_private_endpoint_connection(self): + """Test getting a workspace private endpoint connection.""" + connection_name = "test-pe-connection" + connection = self.client.workspace_private_endpoint_connections.get( + self.resource_group, self.workspace_name, connection_name + ) + assert connection is not None + assert hasattr(connection, "name") + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_create_workspace_private_endpoint_connection(self): + """Test creating a workspace private endpoint connection.""" + connection_name = "test-pe-connection" + connection_data = {"properties": {"privateLinkServiceConnectionState": {"status": "Approved"}}} + operation = self.client.workspace_private_endpoint_connections.begin_create_or_update( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + private_endpoint_connection_name=connection_name, + resource=connection_data, + ) + connection = operation.result() + assert connection is not None + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_delete_workspace_private_endpoint_connection(self): + """Test deleting a workspace private endpoint connection.""" + 
connection_name = "pe-conn-to-delete" + operation = self.client.workspace_private_endpoint_connections.begin_delete( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + private_endpoint_connection_name=connection_name, + ) + operation.result() + + # ============ Workspace Private Link Resource Tests ============ + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_list_workspace_private_link_resources(self): + """Test listing workspace private link resources.""" + link_resources = list( + self.client.workspace_private_link_resources.list_by_workspace(self.resource_group, self.workspace_name) + ) + assert isinstance(link_resources, list) + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_workspace_private_link_resource(self): + """Test getting a workspace private link resource.""" + link_resource_name = "workspace" + link_resource = self.client.workspace_private_link_resources.get( + self.resource_group, self.workspace_name, link_resource_name + ) + assert link_resource is not None + + # ============ Bookshelf Private Endpoint Connection Tests ============ + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_list_bookshelf_private_endpoint_connections(self): + """Test listing bookshelf private endpoint connections.""" + connections = list( + self.client.bookshelf_private_endpoint_connections.list_by_bookshelf( + self.resource_group, self.bookshelf_name + ) + ) + assert isinstance(connections, list) + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_bookshelf_private_endpoint_connection(self): + """Test getting a bookshelf private endpoint connection.""" + connection_name = "test-pe-connection" + connection = self.client.bookshelf_private_endpoint_connections.get( + self.resource_group, self.bookshelf_name, connection_name + ) + assert connection is not None + assert hasattr(connection, "name") + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_create_bookshelf_private_endpoint_connection(self): + """Test creating a bookshelf private endpoint connection.""" + connection_name = "test-pe-connection" + connection_data = {"properties": {"privateLinkServiceConnectionState": {"status": "Approved"}}} + operation = self.client.bookshelf_private_endpoint_connections.begin_create_or_update( + resource_group_name=self.resource_group, + bookshelf_name=self.bookshelf_name, + private_endpoint_connection_name=connection_name, + resource=connection_data, + ) + connection = operation.result() + assert connection is not None + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_delete_bookshelf_private_endpoint_connection(self): + """Test deleting a bookshelf private endpoint connection.""" + connection_name = "pe-conn-to-delete" + operation = self.client.bookshelf_private_endpoint_connections.begin_delete( + resource_group_name=self.resource_group, + bookshelf_name=self.bookshelf_name, + private_endpoint_connection_name=connection_name, + ) + operation.result() + + # ============ Bookshelf Private Link Resource Tests ============ + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_list_bookshelf_private_link_resources(self): + """Test listing bookshelf private link resources.""" + link_resources = list( + self.client.bookshelf_private_link_resources.list_by_bookshelf(self.resource_group, self.bookshelf_name) + ) + assert isinstance(link_resources, list) + + @pytest.mark.skip(reason="no recording") + 
@recorded_by_proxy + def test_get_bookshelf_private_link_resource(self): + """Test getting a bookshelf private link resource.""" + link_resource_name = "bookshelf" + link_resource = self.client.bookshelf_private_link_resources.get( + self.resource_group, self.bookshelf_name, link_resource_name + ) + assert link_resource is not None diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py b/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py new file mode 100644 index 000000000000..c1d04e720eda --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py @@ -0,0 +1,82 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for Projects operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +# Resource group that has a workspace +WORKSPACE_RESOURCE_GROUP = "olawal" +WORKSPACE_NAME = "wrksptest44" + + +class TestProjects(DiscoveryMgmtTestCase): + """Tests for Projects operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = WORKSPACE_RESOURCE_GROUP + self.workspace_name = WORKSPACE_NAME + + @recorded_by_proxy + def test_list_projects_by_workspace(self): + """Test listing projects in a workspace.""" + projects = list(self.client.projects.list_by_workspace("newapiversiontest", self.workspace_name)) + assert isinstance(projects, list) + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_project(self): + """Test getting a specific project by name.""" + # TODO: Replace with actual project name from test environment + project = self.client.projects.get(self.resource_group, self.workspace_name, "test-project") + assert project is not None + assert hasattr(project, "name") + assert hasattr(project, "location") + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_create_project(self): + """Test creating a project.""" + unique_name = "test-proj-placeholder" + project_data = {"location": "uksouth"} + operation = self.client.projects.begin_create_or_update( + resource_group_name="olawal", + workspace_name=self.workspace_name, + project_name=unique_name, + resource=project_data, + ) + project = operation.result() + assert project is not None + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_update_project(self): + """Test updating a project.""" + project_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.projects.begin_create_or_update( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + project_name="test-project", + resource=project_data, + ) + updated_project = operation.result() + assert updated_project is not None + + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_delete_project(self): + """Test deleting a project.""" + operation = self.client.projects.begin_delete( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + project_name="project-to-delete", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py new file mode 100644 index 000000000000..ec7978c255d3 --- /dev/null +++ 
b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py @@ -0,0 +1,81 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for Storage Assets operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + +# Resource group and storage container for storage asset tests +STORAGE_ASSET_RESOURCE_GROUP = "olawal" +STORAGE_ASSET_CONTAINER_NAME = "test-sc-8bef0d1a" + + +class TestStorageAssets(DiscoveryMgmtTestCase): + """Tests for Storage Assets operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = STORAGE_ASSET_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_storage_assets_by_storage_container(self): + """Test listing storage assets in a storage container.""" + assets = list( + self.client.storage_assets.list_by_storage_container(self.resource_group, STORAGE_ASSET_CONTAINER_NAME) + ) + assert isinstance(assets, list) + + @recorded_by_proxy + def test_get_storage_asset(self): + """Test getting a specific storage asset by name.""" + storage_container_name = "test-sc-8bef0d1a" + asset = self.client.storage_assets.get(self.resource_group, storage_container_name, "test-sa-482ad005") + assert asset is not None + assert hasattr(asset, "name") + + @recorded_by_proxy + def test_create_storage_asset(self): + """Test creating a storage asset.""" + storage_container_name = "test-sc-8bef0d1a" + asset_data = { + "location": "uksouth", + "properties": {"description": "Test storage asset for SDK validation", "path": "data/test-assets"}, + } + operation = self.client.storage_assets.begin_create_or_update( + resource_group_name="olawal", + storage_container_name=storage_container_name, + storage_asset_name="test-sa-482ad005", + resource=asset_data, + ) + asset = operation.result() + assert asset is not None + + @recorded_by_proxy + def test_update_storage_asset(self): + """Test updating a storage asset.""" + asset_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.storage_assets.begin_update( + resource_group_name="olawal", + storage_container_name="test-sc-8bef0d1a", + storage_asset_name="test-sa-482ad005", + properties=asset_data, + ) + updated_asset = operation.result() + assert updated_asset is not None + + @recorded_by_proxy + def test_delete_storage_asset(self): + """Test deleting a storage asset.""" + operation = self.client.storage_assets.begin_delete( + resource_group_name="olawal", + storage_container_name="test-sc-8bef0d1a", + storage_asset_name="test-sa-482ad005", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py new file mode 100644 index 000000000000..ed1db8dc3b55 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py @@ -0,0 +1,84 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +"""Tests for StorageContainers operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + +# Resource group that contains storage containers +STORAGE_CONTAINER_RESOURCE_GROUP = "olawal" + + +class TestStorageContainers(DiscoveryMgmtTestCase): + """Tests for StorageContainers operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = STORAGE_CONTAINER_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_storage_containers_by_resource_group(self): + """Test listing storage containers in a resource group.""" + containers = list(self.client.storage_containers.list_by_resource_group(self.resource_group)) + assert isinstance(containers, list) + + @recorded_by_proxy + def test_list_storage_containers_by_subscription(self): + """Test listing storage containers in the subscription.""" + containers = list(self.client.storage_containers.list_by_subscription()) + assert isinstance(containers, list) + + @recorded_by_proxy + def test_get_storage_container(self): + """Test getting a specific storage container by name.""" + container = self.client.storage_containers.get(self.resource_group, "test-sc-8bef0d1a") + assert container is not None + assert hasattr(container, "name") + + @recorded_by_proxy + def test_create_storage_container(self): + """Test creating a storage container.""" + container_data = { + "location": "uksouth", + "properties": { + "storageStore": { + "kind": "AzureStorageBlob", + "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", + } + }, + } + operation = self.client.storage_containers.begin_create_or_update( + resource_group_name="olawal", + storage_container_name="test-sc-8bef0d1a", + resource=container_data, + ) + container = operation.result() + assert container is not None + + @recorded_by_proxy + def test_update_storage_container(self): + """Test updating a storage container.""" + container_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.storage_containers.begin_update( + resource_group_name="olawal", + storage_container_name="test-sc-8bef0d1a", + properties=container_data, + ) + updated_container = operation.result() + assert updated_container is not None + + @recorded_by_proxy + def test_delete_storage_container(self): + """Test deleting a storage container.""" + operation = self.client.storage_containers.begin_delete( + resource_group_name="olawal", + storage_container_name="test-sc-8bef0d1a", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py b/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py new file mode 100644 index 000000000000..94e5cedd641b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py @@ -0,0 +1,89 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +"""Tests for Supercomputers operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + +# Resource group that contains supercomputers +SUPERCOMPUTER_RESOURCE_GROUP = "olawal" + + +class TestSupercomputers(DiscoveryMgmtTestCase): + """Tests for Supercomputers operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = SUPERCOMPUTER_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_supercomputers_by_resource_group(self): + """Test listing supercomputers in a resource group.""" + supercomputers = list(self.client.supercomputers.list_by_resource_group(self.resource_group)) + assert isinstance(supercomputers, list) + + @recorded_by_proxy + def test_list_supercomputers_by_subscription(self): + """Test listing supercomputers in the subscription.""" + supercomputers = list(self.client.supercomputers.list_by_subscription()) + assert isinstance(supercomputers, list) + + @recorded_by_proxy + def test_get_supercomputer(self): + """Test getting a specific supercomputer by name.""" + supercomputer = self.client.supercomputers.get(self.resource_group, "test-sc-2bbb25b8") + assert supercomputer is not None + assert hasattr(supercomputer, "name") + assert hasattr(supercomputer, "location") + + @recorded_by_proxy + def test_create_supercomputer(self): + """Test creating a supercomputer.""" + mi_id = "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity" + supercomputer_data = { + "location": "uksouth", + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "identities": { + "clusterIdentity": {"id": mi_id}, + "kubeletIdentity": {"id": mi_id}, + "workloadIdentities": {mi_id: {}}, + }, + }, + } + operation = self.client.supercomputers.begin_create_or_update( + resource_group_name="olawal", + supercomputer_name="test-sc-2bbb25b8", + resource=supercomputer_data, + ) + supercomputer = operation.result() + assert supercomputer is not None + + @pytest.mark.skip(reason="server returns 400 on supercomputer PATCH - service-side bug") + @recorded_by_proxy + def test_update_supercomputer(self): + """Test updating a supercomputer.""" + supercomputer_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.supercomputers.begin_update( + resource_group_name="olawal", + supercomputer_name="test-sc-2bbb25b8", + properties=supercomputer_data, + ) + updated_supercomputer = operation.result() + assert updated_supercomputer is not None + + @recorded_by_proxy + def test_delete_supercomputer(self): + """Test deleting a supercomputer.""" + operation = self.client.supercomputers.begin_delete( + resource_group_name="olawal", + supercomputer_name="test-sc-2bbb25b8", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py b/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py new file mode 100644 index 000000000000..6952e6d458a0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py @@ -0,0 +1,123 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +"""Tests for Tools operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase, AZURE_RESOURCE_GROUP + + +# Known tool name in the test environment +TOOL_NAME = "test-tool-50d87c62" + + +class TestTools(DiscoveryMgmtTestCase): + """Tests for Tools operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = AZURE_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_tools_by_subscription(self): + """Test listing tools in the subscription.""" + tools = list(self.client.tools.list_by_subscription()) + assert isinstance(tools, list) + + @recorded_by_proxy + def test_list_tools_by_resource_group(self): + """Test listing tools in a resource group.""" + tools = list(self.client.tools.list_by_resource_group(self.resource_group)) + assert isinstance(tools, list) + + @recorded_by_proxy + def test_get_tool(self): + """Test getting a specific tool by name.""" + tool = self.client.tools.get(self.resource_group, TOOL_NAME) + assert tool is not None + # Don't assert on name since it may be sanitized in playback + assert hasattr(tool, "name") + assert hasattr(tool, "location") + + @recorded_by_proxy + def test_create_tool(self): + """Test creating a tool.""" + tool_data = { + "location": "uksouth", + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "molpredictor", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "worker", + "infra_type": "container", + "image": {"acr": "demodiscoveryacr.azurecr.io/molpredictor:latest"}, + "compute": { + "min_resources": {"cpu": "1", "ram": "1Gi", "storage": "32", "gpu": "0"}, + "max_resources": {"cpu": "2", "ram": "1Gi", "storage": "64", "gpu": "0"}, + "recommended_sku": ["Standard_D4s_v6"], + "pool_type": "static", + "pool_size": 1, + }, + } + ], + "actions": [ + { + "name": "predict", + "description": "Predict molecular properties for SMILES strings.", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]", + } + }, + "required": ["action"], + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker", + } + ], + }, + }, + } + operation = self.client.tools.begin_create_or_update( + resource_group_name="olawal", + tool_name="test-tool-50d87c62", + resource=tool_data, + ) + tool = operation.result() + assert tool is not None + + @recorded_by_proxy + def test_update_tool(self): + """Test updating a tool.""" + tool_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.tools.begin_update( + resource_group_name="olawal", + tool_name=TOOL_NAME, + properties=tool_data, + ) + updated_tool = operation.result() + assert updated_tool is not None + + @recorded_by_proxy + def test_delete_tool(self): + """Test deleting a tool.""" + operation = self.client.tools.begin_delete( + resource_group_name="olawal", + tool_name=TOOL_NAME, + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_unit_client.py b/sdk/discovery/azure-mgmt-discovery/tests/test_unit_client.py new file mode 100644 index 000000000000..af5c5e0b6591 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_unit_client.py @@ -0,0 +1,51 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +""" +Unit tests for azure-mgmt-discovery client. + +These tests verify client configuration without making HTTP calls. +""" +from azure.mgmt.discovery import DiscoveryMgmtClient + + +class TestDiscoveryMgmtClientUnit: + """Unit tests for Discovery management client initialization.""" + + def test_client_has_expected_operations(self): + """Test that client exposes expected operation groups.""" + from azure.identity import DefaultAzureCredential + + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-0000-0000-0000-000000000000", + ) + + # Verify operation groups exist + assert hasattr(client, "operations") + assert hasattr(client, "workspaces") + assert hasattr(client, "workspace_private_endpoint_connections") + assert hasattr(client, "workspace_private_link_resources") + assert hasattr(client, "bookshelves") + assert hasattr(client, "bookshelf_private_endpoint_connections") + assert hasattr(client, "bookshelf_private_link_resources") + assert hasattr(client, "projects") + assert hasattr(client, "storage_assets") + assert hasattr(client, "storage_containers") + assert hasattr(client, "tools") + assert hasattr(client, "supercomputers") + assert hasattr(client, "node_pools") + assert hasattr(client, "chat_model_deployments") + + def test_client_api_version(self): + """Test that client uses correct API version.""" + from azure.identity import DefaultAzureCredential + + client = DiscoveryMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-0000-0000-0000-000000000000", + ) + + # Verify API version is set + assert client._config.api_version == "2026-02-01-preview" diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_unit_models.py b/sdk/discovery/azure-mgmt-discovery/tests/test_unit_models.py new file mode 100644 index 000000000000..0563f868ab20 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_unit_models.py @@ -0,0 +1,92 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +""" +Unit tests for azure-mgmt-discovery models. + +These tests verify model initialization without making HTTP calls. +""" +from azure.mgmt.discovery import models + + +class TestDiscoveryModelsUnit: + """Unit tests for Discovery management SDK models.""" + + def test_workspace_model_initialization(self): + """Test Workspace model can be initialized.""" + workspace = models.Workspace( + location="eastus", + properties=models.WorkspaceProperties(), + ) + assert workspace.location == "eastus" + assert workspace.properties is not None + + def test_identity_model(self): + """Test Identity model can be initialized.""" + identity = models.Identity( + id="/subscriptions/sub/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/id", + ) + assert ( + identity.id + == "/subscriptions/sub/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/id" + ) + + def test_bookshelf_model_initialization(self): + """Test Bookshelf model can be initialized.""" + bookshelf = models.Bookshelf( + location="eastus", + properties=models.BookshelfProperties(), + ) + assert bookshelf.location == "eastus" + assert bookshelf.properties is not None + + def test_project_model_initialization(self): + """Test Project model can be initialized.""" + project = models.Project( + location="eastus", + properties=models.ProjectProperties(), + ) + assert project.location == "eastus" + assert project.properties is not None + + def test_storage_asset_model_initialization(self): + """Test StorageAsset model can be initialized.""" + storage = models.StorageAsset( + location="eastus", + properties=models.StorageAssetProperties(), + ) + assert storage.location == "eastus" + assert storage.properties is not None + + def test_storage_container_model_initialization(self): + """Test StorageContainer model can be initialized.""" + container = models.StorageContainer( + properties=models.StorageContainerProperties(), + ) + assert container.properties is not None + + def test_tool_model_initialization(self): + """Test Tool model can be initialized.""" + tool = models.Tool( + location="eastus", + properties=models.ToolProperties(), + ) + assert tool.location == "eastus" + assert tool.properties is not None + + def test_supercomputer_model_initialization(self): + """Test Supercomputer model can be initialized.""" + supercomputer = models.Supercomputer( + location="eastus", + properties=models.SupercomputerProperties(), + ) + assert supercomputer.location == "eastus" + assert supercomputer.properties is not None + + def test_node_pool_model_initialization(self): + """Test NodePool model can be initialized.""" + node_pool = models.NodePool( + properties=models.NodePoolProperties(), + ) + assert node_pool.properties is not None diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py b/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py new file mode 100644 index 000000000000..f52087572a82 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py @@ -0,0 +1,108 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +"""Tests for Workspaces operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryMgmtClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +# Resource group and workspace that exist in the test environment +WORKSPACE_RESOURCE_GROUP = "olawal" +WORKSPACE_NAME = "test-wrksp-create01" + + +class TestWorkspaces(DiscoveryMgmtTestCase): + """Tests for Workspaces operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryMgmtClient) + self.resource_group = WORKSPACE_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_workspaces_by_subscription(self): + """Test listing workspaces in the subscription.""" + workspaces = list(self.client.workspaces.list_by_subscription()) + assert isinstance(workspaces, list) + assert len(workspaces) >= 1 + + @recorded_by_proxy + def test_list_workspaces_by_resource_group(self): + """Test listing workspaces in a resource group.""" + workspaces = list(self.client.workspaces.list_by_resource_group(self.resource_group)) + assert isinstance(workspaces, list) + assert len(workspaces) >= 1 + + @recorded_by_proxy + def test_get_workspace(self): + """Test getting a specific workspace by name.""" + workspace = self.client.workspaces.get(self.resource_group, WORKSPACE_NAME) + assert workspace is not None + # Don't assert on name since it may be sanitized in playback + assert hasattr(workspace, "name") + assert hasattr(workspace, "location") + + @recorded_by_proxy + def test_create_workspace(self): + """Test creating a workspace.""" + workspace_name = "test-wrksp-create01" + workspace_data = { + "location": "uksouth", + "properties": { + "supercomputerIds": [], + "workspaceIdentity": { + "id": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity" + }, + "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default3", + "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default2", + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyName": "discoverykey", + "keyVaultUri": "https://newapik.vault.azure.net/", + "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906", + }, + "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", + "publicNetworkAccess": "Disabled", + }, + } + operation = self.client.workspaces.begin_create_or_update( + resource_group_name="olawal", + workspace_name=workspace_name, + resource=workspace_data, + ) + workspace = operation.result() + assert workspace is not None + + @recorded_by_proxy + def test_update_workspace(self): + """Test updating a workspace by changing the key vault key version.""" + # PATCH the workspace with the new key version + update_data = { + "properties": { + "keyVaultProperties": { + "keyName": "discoverykey", + "keyVersion": "956de2fc802f49eba81ddcc348ebc27c", + }, + }, + } + operation = self.client.workspaces.begin_update( + resource_group_name=self.resource_group, + workspace_name=WORKSPACE_NAME, + 
properties=update_data,
+        )
+        updated_workspace = operation.result()
+        assert updated_workspace is not None
+
+    @recorded_by_proxy
+    def test_delete_workspace(self):
+        """Test deleting a workspace."""
+        operation = self.client.workspaces.begin_delete(
+            resource_group_name="olawal",
+            workspace_name="test-wrksp-397d51cf",
+        )
+        operation.result()
diff --git a/sdk/discovery/azure-mgmt-discovery/tests/testcase.py b/sdk/discovery/azure-mgmt-discovery/tests/testcase.py
new file mode 100644
index 000000000000..69da7fea4d3b
--- /dev/null
+++ b/sdk/discovery/azure-mgmt-discovery/tests/testcase.py
@@ -0,0 +1,36 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""
+Base test class for azure-mgmt-discovery tests.
+
+Management SDK tests use AzureMgmtRecordedTestCase.
+The 2026-02-01-preview API is currently behind a feature flag and requires
+the EUAP (Early Update Access Program) endpoint.
+"""
+import os
+from devtools_testutils import AzureMgmtRecordedTestCase
+
+
+# EUAP endpoint required for 2026-02-01-preview API (feature-flagged)
+AZURE_ARM_ENDPOINT = os.environ.get("AZURE_ARM_ENDPOINT", "https://eastus2euap.management.azure.com")
+AZURE_LOCATION = os.environ.get("AZURE_LOCATION", "centraluseuap")
+
+# Test subscription and resource group with the feature flag enabled
+AZURE_SUBSCRIPTION_ID = os.environ.get("AZURE_SUBSCRIPTION_ID", "31b0b6a5-2647-47eb-8a38-7d12047ee8ec")
+AZURE_RESOURCE_GROUP = os.environ.get("AZURE_RESOURCE_GROUP", "olawal")
+
+
+class DiscoveryMgmtTestCase(AzureMgmtRecordedTestCase):
+    """Base test class for Discovery management SDK tests.
+
+    Configures the client to use the EUAP endpoint for the feature-flagged API.
+    """
+
+    def create_discovery_client(self, client_class):
+        """Create a Discovery client configured for the EUAP endpoint."""
+        # Use environment variable for subscription or default
+        subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", AZURE_SUBSCRIPTION_ID)
+        credential = self.get_credential(client_class)
+        return client_class(credential=credential, subscription_id=subscription_id, base_url=AZURE_ARM_ENDPOINT)
diff --git a/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml b/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml
new file mode 100644
index 000000000000..dfe03767464b
--- /dev/null
+++ b/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml
@@ -0,0 +1,8 @@
+directory: specification/discovery/Discovery.Management
+commit: 402bf21e472d87c1b3abcd0b2675a8b6c8a42700
+repo: Azure/azure-rest-api-specs
+additionalDirectories:
+- specification/discovery/Discovery.Management.Shared
+- specification/discovery/Discovery.Bookshelf.Management
+- specification/discovery/Discovery.Supercomputer.Management
+- specification/discovery/Discovery.Workspace.Management
diff --git a/sdk/discovery/ci.yml b/sdk/discovery/ci.yml
new file mode 100644
index 000000000000..148300d84180
--- /dev/null
+++ b/sdk/discovery/ci.yml
@@ -0,0 +1,34 @@
+# DO NOT EDIT THIS FILE
+# This file is generated automatically and any changes will be lost.
+ +trigger: + branches: + include: + - main + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/discovery/ + +pr: + branches: + include: + - main + - feature/* + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/discovery/ + +extends: + template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml + parameters: + ServiceDirectory: discovery + TestProxy: true + Artifacts: + - name: azure-mgmt-discovery + safeName: azuremgmtdiscovery