From 399bde6bdf5d09ea67115c6171da770972337836 Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Fri, 6 Mar 2026 21:58:09 +0000 Subject: [PATCH 1/9] Configurations: 'specification/discovery/Discovery.Management/tspconfig.yaml', API Version: 2026-02-01-preview, SDK Release Type: beta, and CommitSHA: '74cc90c49189a079b3cc93fde9c9ad76742f0184' in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs' Pipeline run: https://dev.azure.com/azure-sdk/internal/_build/results?buildId=5975279 Refer to https://eng.ms/docs/products/azure-developer-experience/develop/sdk-release/sdk-release-prerequisites to prepare for SDK release. --- .../azure-mgmt-discovery/CHANGELOG.md | 7 + sdk/discovery/azure-mgmt-discovery/LICENSE | 21 + .../azure-mgmt-discovery/MANIFEST.in | 7 + sdk/discovery/azure-mgmt-discovery/README.md | 61 + .../azure-mgmt-discovery/_metadata.json | 10 + .../apiview-properties.json | 191 + .../azure-mgmt-discovery/azure/__init__.py | 1 + .../azure/mgmt/__init__.py | 1 + .../azure/mgmt/discovery/__init__.py | 32 + .../azure/mgmt/discovery/_client.py | 206 + .../azure/mgmt/discovery/_configuration.py | 80 + .../azure/mgmt/discovery/_patch.py | 21 + .../azure/mgmt/discovery/_utils/__init__.py | 6 + .../azure/mgmt/discovery/_utils/model_base.py | 1343 ++ .../mgmt/discovery/_utils/serialization.py | 2041 +++ .../azure/mgmt/discovery/_version.py | 9 + .../azure/mgmt/discovery/aio/__init__.py | 29 + .../azure/mgmt/discovery/aio/_client.py | 211 + .../mgmt/discovery/aio/_configuration.py | 80 + .../azure/mgmt/discovery/aio/_patch.py | 21 + .../mgmt/discovery/aio/operations/__init__.py | 51 + .../discovery/aio/operations/_operations.py | 9323 +++++++++++++ .../mgmt/discovery/aio/operations/_patch.py | 21 + .../azure/mgmt/discovery/models/__init__.py | 148 + .../azure/mgmt/discovery/models/_enums.py | 179 + .../azure/mgmt/discovery/models/_models.py | 2095 +++ .../azure/mgmt/discovery/models/_patch.py | 21 + .../mgmt/discovery/operations/__init__.py | 51 + 
.../mgmt/discovery/operations/_operations.py | 11023 ++++++++++++++++ .../azure/mgmt/discovery/operations/_patch.py | 21 + .../azure/mgmt/discovery/py.typed | 1 + .../azure-mgmt-discovery/dev_requirements.txt | 5 + ...ctions_create_or_update_maximum_set_gen.py | 53 + ...oint_connections_delete_maximum_set_gen.py | 42 + ...ndpoint_connections_get_maximum_set_gen.py | 43 + ...tions_list_by_bookshelf_maximum_set_gen.py | 43 + ...vate_link_resources_get_maximum_set_gen.py | 43 + ...urces_list_by_bookshelf_maximum_set_gen.py | 43 + ...helves_create_or_update_maximum_set_gen.py | 61 + .../bookshelves_delete_maximum_set_gen.py | 41 + .../bookshelves_get_maximum_set_gen.py | 42 + ..._list_by_resource_group_maximum_set_gen.py | 42 + ...es_list_by_subscription_maximum_set_gen.py | 40 + .../bookshelves_update_maximum_set_gen.py | 49 + ...yments_create_or_update_maximum_set_gen.py | 48 + ...odel_deployments_delete_maximum_set_gen.py | 42 + ...t_model_deployments_get_maximum_set_gen.py | 43 + ...ments_list_by_workspace_maximum_set_gen.py | 43 + ...odel_deployments_update_maximum_set_gen.py | 44 + ..._pools_create_or_update_maximum_set_gen.py | 55 + .../node_pools_delete_maximum_set_gen.py | 42 + .../node_pools_get_maximum_set_gen.py | 43 + ...s_list_by_supercomputer_maximum_set_gen.py | 43 + .../node_pools_update_maximum_set_gen.py | 44 + .../operations_list_maximum_set_gen.py | 40 + .../operations_list_minimum_set_gen.py | 40 + ...ojects_create_or_update_maximum_set_gen.py | 56 + .../projects_delete_maximum_set_gen.py | 42 + .../projects_get_maximum_set_gen.py | 43 + ...jects_list_by_workspace_maximum_set_gen.py | 43 + .../projects_update_maximum_set_gen.py | 55 + ...assets_create_or_update_maximum_set_gen.py | 48 + .../storage_assets_delete_maximum_set_gen.py | 42 + .../storage_assets_get_maximum_set_gen.py | 43 + ...st_by_storage_container_maximum_set_gen.py | 43 + .../storage_assets_update_maximum_set_gen.py | 44 + ...ainers_create_or_update_maximum_set_gen.py | 53 + 
...orage_containers_delete_maximum_set_gen.py | 41 + .../storage_containers_get_maximum_set_gen.py | 42 + ..._list_by_resource_group_maximum_set_gen.py | 42 + ...rs_list_by_subscription_maximum_set_gen.py | 40 + ...orage_containers_update_maximum_set_gen.py | 43 + ...puters_create_or_update_maximum_set_gen.py | 65 + .../supercomputers_delete_maximum_set_gen.py | 41 + .../supercomputers_get_maximum_set_gen.py | 42 + ..._list_by_resource_group_maximum_set_gen.py | 42 + ...rs_list_by_subscription_maximum_set_gen.py | 40 + .../supercomputers_update_maximum_set_gen.py | 46 + .../tools_create_or_update_maximum_set_gen.py | 88 + .../tools_delete_maximum_set_gen.py | 41 + .../tools_get_maximum_set_gen.py | 42 + ..._list_by_resource_group_maximum_set_gen.py | 42 + ...ls_list_by_subscription_maximum_set_gen.py | 40 + .../tools_update_maximum_set_gen.py | 87 + ...ctions_create_or_update_maximum_set_gen.py | 53 + ...oint_connections_delete_maximum_set_gen.py | 42 + ...ndpoint_connections_get_maximum_set_gen.py | 43 + ...tions_list_by_workspace_maximum_set_gen.py | 43 + ...vate_link_resources_get_maximum_set_gen.py | 43 + ...urces_list_by_workspace_maximum_set_gen.py | 43 + ...spaces_create_or_update_maximum_set_gen.py | 66 + .../workspaces_delete_maximum_set_gen.py | 41 + .../workspaces_get_maximum_set_gen.py | 42 + ..._list_by_resource_group_maximum_set_gen.py | 42 + ...es_list_by_subscription_maximum_set_gen.py | 40 + .../workspaces_update_maximum_set_gen.py | 56 + .../generated_tests/conftest.py | 35 + ...private_endpoint_connections_operations.py | 89 + ...e_endpoint_connections_operations_async.py | 94 + ...shelf_private_link_resources_operations.py | 42 + ...private_link_resources_operations_async.py | 43 + .../test_discovery_bookshelves_operations.py | 197 + ..._discovery_bookshelves_operations_async.py | 204 + ...overy_chat_model_deployments_operations.py | 110 + ...chat_model_deployments_operations_async.py | 117 + .../test_discovery_node_pools_operations.py | 124 + 
...t_discovery_node_pools_operations_async.py | 131 + .../test_discovery_operations.py | 27 + .../test_discovery_operations_async.py | 28 + .../test_discovery_projects_operations.py | 120 + ...est_discovery_projects_operations_async.py | 127 + ...est_discovery_storage_assets_operations.py | 110 + ...scovery_storage_assets_operations_async.py | 117 + ...discovery_storage_containers_operations.py | 113 + ...ery_storage_containers_operations_async.py | 120 + ...est_discovery_supercomputers_operations.py | 145 + ...scovery_supercomputers_operations_async.py | 152 + .../test_discovery_tools_operations.py | 123 + .../test_discovery_tools_operations_async.py | 130 + ...private_endpoint_connections_operations.py | 89 + ...e_endpoint_connections_operations_async.py | 94 + ...space_private_link_resources_operations.py | 42 + ...private_link_resources_operations_async.py | 43 + .../test_discovery_workspaces_operations.py | 193 + ...t_discovery_workspaces_operations_async.py | 200 + .../azure-mgmt-discovery/pyproject.toml | 86 + .../azure-mgmt-discovery/tsp-location.yaml | 8 + sdk/discovery/ci.yml | 34 + 128 files changed, 33563 insertions(+) create mode 100644 sdk/discovery/azure-mgmt-discovery/CHANGELOG.md create mode 100644 sdk/discovery/azure-mgmt-discovery/LICENSE create mode 100644 sdk/discovery/azure-mgmt-discovery/MANIFEST.in create mode 100644 sdk/discovery/azure-mgmt-discovery/README.md create mode 100644 sdk/discovery/azure-mgmt-discovery/_metadata.json create mode 100644 sdk/discovery/azure-mgmt-discovery/apiview-properties.json create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_client.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_configuration.py create mode 100644 
sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_patch.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/model_base.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/serialization.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_version.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_patch.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_patch.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_enums.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_models.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_patch.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_patch.py create mode 100644 sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/py.typed create mode 100644 sdk/discovery/azure-mgmt-discovery/dev_requirements.txt create mode 100644 
sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py create mode 100644 
sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_list_by_supercomputer_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_minimum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/projects_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/projects_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/projects_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/projects_list_by_workspace_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/projects_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_get_maximum_set_gen.py create mode 100644 
sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_list_by_storage_container_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py 
create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py create mode 100644 
sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations_async.py create mode 100644 
sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations_async.py create mode 100644 sdk/discovery/azure-mgmt-discovery/pyproject.toml create mode 100644 sdk/discovery/azure-mgmt-discovery/tsp-location.yaml create mode 100644 sdk/discovery/ci.yml diff --git a/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md b/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md new file mode 100644 index 000000000000..04110fc84c4b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md @@ -0,0 +1,7 @@ +# Release History + +## 1.0.0b1 (2026-03-06) + +### Other Changes + + - Initial version \ No newline at end of file 
diff --git a/sdk/discovery/azure-mgmt-discovery/LICENSE b/sdk/discovery/azure-mgmt-discovery/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/MANIFEST.in b/sdk/discovery/azure-mgmt-discovery/MANIFEST.in new file mode 100644 index 000000000000..9fcce60f53a6 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/MANIFEST.in @@ -0,0 +1,7 @@ +include *.md +include LICENSE +include azure/mgmt/discovery/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/mgmt/__init__.py diff --git a/sdk/discovery/azure-mgmt-discovery/README.md b/sdk/discovery/azure-mgmt-discovery/README.md new file mode 100644 index 000000000000..8e9a0612cb7e --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/README.md @@ -0,0 +1,61 @@ +# Microsoft Azure SDK for Python + +This is the Microsoft Azure Discovery Management Client Library. +This package has been tested with Python 3.9+. +For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). + +## _Disclaimer_ + +_Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For more information and questions, please refer to https://github.com/Azure/azure-sdk-for-python/issues/20691_ + +## Getting started + +### Prerequisites + +- Python 3.9+ is required to use this package. +- [Azure subscription](https://azure.microsoft.com/free/) + +### Install the package + +```bash +pip install azure-mgmt-discovery +pip install azure-identity +``` + +### Authentication + +By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configuration of the following environment variables. + +- `AZURE_CLIENT_ID` for Azure client ID. +- `AZURE_TENANT_ID` for Azure tenant ID. +- `AZURE_CLIENT_SECRET` for Azure client secret. + +In addition, Azure subscription ID can be configured via environment variable `AZURE_SUBSCRIPTION_ID`. 
+ +With above configuration, client can be authenticated by following code: + +```python +from azure.identity import DefaultAzureCredential +from azure.mgmt.discovery import DiscoveryClient +import os + +sub_id = os.getenv("AZURE_SUBSCRIPTION_ID") +client = DiscoveryClient(credential=DefaultAzureCredential(), subscription_id=sub_id) +``` + +## Examples + +Code samples for this package can be found at: +- [Search Discovery Management](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com +- [Azure Python Mgmt SDK Samples Repo](https://aka.ms/azsdk/python/mgmt/samples) + + +## Troubleshooting + +## Next steps + +## Provide Feedback + +If you encounter any bugs or have suggestions, please file an issue in the +[Issues](https://github.com/Azure/azure-sdk-for-python/issues) +section of the project. diff --git a/sdk/discovery/azure-mgmt-discovery/_metadata.json b/sdk/discovery/azure-mgmt-discovery/_metadata.json new file mode 100644 index 000000000000..9fa699c9eb60 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/_metadata.json @@ -0,0 +1,10 @@ +{ + "apiVersion": "2026-02-01-preview", + "apiVersions": { + "Microsoft.Discovery": "2026-02-01-preview" + }, + "commit": "74cc90c49189a079b3cc93fde9c9ad76742f0184", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "typespec_src": "specification/discovery/Discovery.Management", + "emitterVersion": "0.60.2" +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/apiview-properties.json b/sdk/discovery/azure-mgmt-discovery/apiview-properties.json new file mode 100644 index 000000000000..f5370173cc3b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/apiview-properties.json @@ -0,0 +1,191 @@ +{ + "CrossLanguagePackageId": "Microsoft.Discovery", + "CrossLanguageDefinitionId": { + "azure.mgmt.discovery.models.StorageStore": "Microsoft.Discovery.StorageStore", + 
"azure.mgmt.discovery.models.AzureNetAppFilesStore": "Microsoft.Discovery.AzureNetAppFilesStore", + "azure.mgmt.discovery.models.AzureStorageBlobStore": "Microsoft.Discovery.AzureStorageBlobStore", + "azure.mgmt.discovery.models.Resource": "Azure.ResourceManager.CommonTypes.Resource", + "azure.mgmt.discovery.models.TrackedResource": "Azure.ResourceManager.CommonTypes.TrackedResource", + "azure.mgmt.discovery.models.Bookshelf": "Microsoft.Discovery.Bookshelf", + "azure.mgmt.discovery.models.BookshelfKeyVaultProperties": "Microsoft.Discovery.BookshelfKeyVaultProperties", + "azure.mgmt.discovery.models.ProxyResource": "Azure.ResourceManager.CommonTypes.ProxyResource", + "azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection": "Microsoft.Discovery.BookshelfPrivateEndpointConnection", + "azure.mgmt.discovery.models.BookshelfPrivateLinkResource": "Microsoft.Discovery.BookshelfPrivateLinkResource", + "azure.mgmt.discovery.models.BookshelfProperties": "Microsoft.Discovery.BookshelfProperties", + "azure.mgmt.discovery.models.ChatModelDeployment": "Microsoft.Discovery.ChatModelDeployment", + "azure.mgmt.discovery.models.ChatModelDeploymentProperties": "Microsoft.Discovery.ChatModelDeploymentProperties", + "azure.mgmt.discovery.models.ErrorAdditionalInfo": "Azure.ResourceManager.CommonTypes.ErrorAdditionalInfo", + "azure.mgmt.discovery.models.ErrorDetail": "Azure.ResourceManager.CommonTypes.ErrorDetail", + "azure.mgmt.discovery.models.ErrorResponse": "Azure.ResourceManager.CommonTypes.ErrorResponse", + "azure.mgmt.discovery.models.Identity": "Microsoft.Discovery.Identity", + "azure.mgmt.discovery.models.KeyVaultProperties": "Microsoft.Discovery.KeyVaultProperties", + "azure.mgmt.discovery.models.MoboBrokerResource": "Azure.ResourceManager.CommonTypes.MoboBrokerResource", + "azure.mgmt.discovery.models.NodePool": "Microsoft.Discovery.NodePool", + "azure.mgmt.discovery.models.NodePoolProperties": "Microsoft.Discovery.NodePoolProperties", + 
"azure.mgmt.discovery.models.Operation": "Azure.ResourceManager.CommonTypes.Operation", + "azure.mgmt.discovery.models.OperationDisplay": "Azure.ResourceManager.CommonTypes.OperationDisplay", + "azure.mgmt.discovery.models.PrivateEndpoint": "Azure.ResourceManager.CommonTypes.PrivateEndpoint", + "azure.mgmt.discovery.models.PrivateEndpointConnection": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnection", + "azure.mgmt.discovery.models.PrivateEndpointConnectionProperties": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnectionProperties", + "azure.mgmt.discovery.models.PrivateLinkResourceProperties": "Azure.ResourceManager.CommonTypes.PrivateLinkResourceProperties", + "azure.mgmt.discovery.models.PrivateLinkServiceConnectionState": "Azure.ResourceManager.CommonTypes.PrivateLinkServiceConnectionState", + "azure.mgmt.discovery.models.Project": "Microsoft.Discovery.Project", + "azure.mgmt.discovery.models.ProjectProperties": "Microsoft.Discovery.ProjectProperties", + "azure.mgmt.discovery.models.ProjectSettings": "Microsoft.Discovery.ProjectSettings", + "azure.mgmt.discovery.models.StorageAsset": "Microsoft.Discovery.StorageAsset", + "azure.mgmt.discovery.models.StorageAssetProperties": "Microsoft.Discovery.StorageAssetProperties", + "azure.mgmt.discovery.models.StorageContainer": "Microsoft.Discovery.StorageContainer", + "azure.mgmt.discovery.models.StorageContainerProperties": "Microsoft.Discovery.StorageContainerProperties", + "azure.mgmt.discovery.models.Supercomputer": "Microsoft.Discovery.Supercomputer", + "azure.mgmt.discovery.models.SupercomputerIdentities": "Microsoft.Discovery.SupercomputerIdentities", + "azure.mgmt.discovery.models.SupercomputerProperties": "Microsoft.Discovery.SupercomputerProperties", + "azure.mgmt.discovery.models.SystemData": "Azure.ResourceManager.CommonTypes.SystemData", + "azure.mgmt.discovery.models.Tool": "Microsoft.Discovery.Tool", + "azure.mgmt.discovery.models.ToolProperties": "Microsoft.Discovery.ToolProperties", + 
"azure.mgmt.discovery.models.UserAssignedIdentity": "Azure.ResourceManager.CommonTypes.UserAssignedIdentity", + "azure.mgmt.discovery.models.WithMoboBrokerResources": "Microsoft.Discovery.WithMoboBrokerResources", + "azure.mgmt.discovery.models.Workspace": "Microsoft.Discovery.Workspace", + "azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection": "Microsoft.Discovery.WorkspacePrivateEndpointConnection", + "azure.mgmt.discovery.models.WorkspacePrivateLinkResource": "Microsoft.Discovery.WorkspacePrivateLinkResource", + "azure.mgmt.discovery.models.WorkspaceProperties": "Microsoft.Discovery.WorkspaceProperties", + "azure.mgmt.discovery.models.Origin": "Azure.ResourceManager.CommonTypes.Origin", + "azure.mgmt.discovery.models.ActionType": "Azure.ResourceManager.CommonTypes.ActionType", + "azure.mgmt.discovery.models.CreatedByType": "Azure.ResourceManager.CommonTypes.createdByType", + "azure.mgmt.discovery.models.ProvisioningState": "Microsoft.Discovery.ProvisioningState", + "azure.mgmt.discovery.models.CustomerManagedKeys": "Microsoft.Discovery.CustomerManagedKeys", + "azure.mgmt.discovery.models.PrivateEndpointServiceConnectionStatus": "Azure.ResourceManager.CommonTypes.PrivateEndpointServiceConnectionStatus", + "azure.mgmt.discovery.models.PrivateEndpointConnectionProvisioningState": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnectionProvisioningState", + "azure.mgmt.discovery.models.PublicNetworkAccess": "Microsoft.Discovery.PublicNetworkAccess", + "azure.mgmt.discovery.models.VmSize": "Microsoft.Discovery.VmSize", + "azure.mgmt.discovery.models.ScaleSetPriority": "Microsoft.Discovery.ScaleSetPriority", + "azure.mgmt.discovery.models.NetworkEgressType": "Microsoft.Discovery.NetworkEgressType", + "azure.mgmt.discovery.models.SystemSku": "Microsoft.Discovery.SystemSku", + "azure.mgmt.discovery.models.StorageStoreType": "Microsoft.Discovery.StorageStoreType", + "azure.mgmt.discovery.operations.Operations.list": "Azure.ResourceManager.Operations.list", 
+ "azure.mgmt.discovery.aio.operations.Operations.list": "Azure.ResourceManager.Operations.list", + "azure.mgmt.discovery.operations.BookshelvesOperations.get": "Microsoft.Discovery.Bookshelves.get", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.get": "Microsoft.Discovery.Bookshelves.get", + "azure.mgmt.discovery.operations.BookshelvesOperations.begin_create_or_update": "Microsoft.Discovery.Bookshelves.createOrUpdate", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.begin_create_or_update": "Microsoft.Discovery.Bookshelves.createOrUpdate", + "azure.mgmt.discovery.operations.BookshelvesOperations.begin_update": "Microsoft.Discovery.Bookshelves.update", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.begin_update": "Microsoft.Discovery.Bookshelves.update", + "azure.mgmt.discovery.operations.BookshelvesOperations.begin_delete": "Microsoft.Discovery.Bookshelves.delete", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.begin_delete": "Microsoft.Discovery.Bookshelves.delete", + "azure.mgmt.discovery.operations.BookshelvesOperations.list_by_resource_group": "Microsoft.Discovery.Bookshelves.listByResourceGroup", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.list_by_resource_group": "Microsoft.Discovery.Bookshelves.listByResourceGroup", + "azure.mgmt.discovery.operations.BookshelvesOperations.list_by_subscription": "Microsoft.Discovery.Bookshelves.listBySubscription", + "azure.mgmt.discovery.aio.operations.BookshelvesOperations.list_by_subscription": "Microsoft.Discovery.Bookshelves.listBySubscription", + "azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations.get": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.get", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations.get": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.get", + "azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations.begin_create_or_update": 
"Microsoft.Discovery.BookshelfPrivateEndpointConnections.createOrUpdate", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations.begin_create_or_update": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.createOrUpdate", + "azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations.begin_delete": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.delete", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations.begin_delete": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.delete", + "azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations.list_by_bookshelf": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.listByBookshelf", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations.list_by_bookshelf": "Microsoft.Discovery.BookshelfPrivateEndpointConnections.listByBookshelf", + "azure.mgmt.discovery.operations.BookshelfPrivateLinkResourcesOperations.get": "Microsoft.Discovery.BookshelfPrivateLinkResources.get", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateLinkResourcesOperations.get": "Microsoft.Discovery.BookshelfPrivateLinkResources.get", + "azure.mgmt.discovery.operations.BookshelfPrivateLinkResourcesOperations.list_by_bookshelf": "Microsoft.Discovery.BookshelfPrivateLinkResources.listByBookshelf", + "azure.mgmt.discovery.aio.operations.BookshelfPrivateLinkResourcesOperations.list_by_bookshelf": "Microsoft.Discovery.BookshelfPrivateLinkResources.listByBookshelf", + "azure.mgmt.discovery.operations.ToolsOperations.get": "Microsoft.Discovery.Tools.get", + "azure.mgmt.discovery.aio.operations.ToolsOperations.get": "Microsoft.Discovery.Tools.get", + "azure.mgmt.discovery.operations.ToolsOperations.begin_create_or_update": "Microsoft.Discovery.Tools.createOrUpdate", + "azure.mgmt.discovery.aio.operations.ToolsOperations.begin_create_or_update": "Microsoft.Discovery.Tools.createOrUpdate", + 
"azure.mgmt.discovery.operations.ToolsOperations.begin_update": "Microsoft.Discovery.Tools.update", + "azure.mgmt.discovery.aio.operations.ToolsOperations.begin_update": "Microsoft.Discovery.Tools.update", + "azure.mgmt.discovery.operations.ToolsOperations.begin_delete": "Microsoft.Discovery.Tools.delete", + "azure.mgmt.discovery.aio.operations.ToolsOperations.begin_delete": "Microsoft.Discovery.Tools.delete", + "azure.mgmt.discovery.operations.ToolsOperations.list_by_resource_group": "Microsoft.Discovery.Tools.listByResourceGroup", + "azure.mgmt.discovery.aio.operations.ToolsOperations.list_by_resource_group": "Microsoft.Discovery.Tools.listByResourceGroup", + "azure.mgmt.discovery.operations.ToolsOperations.list_by_subscription": "Microsoft.Discovery.Tools.listBySubscription", + "azure.mgmt.discovery.aio.operations.ToolsOperations.list_by_subscription": "Microsoft.Discovery.Tools.listBySubscription", + "azure.mgmt.discovery.operations.ProjectsOperations.get": "Microsoft.Discovery.Projects.get", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.get": "Microsoft.Discovery.Projects.get", + "azure.mgmt.discovery.operations.ProjectsOperations.begin_create_or_update": "Microsoft.Discovery.Projects.createOrUpdate", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.begin_create_or_update": "Microsoft.Discovery.Projects.createOrUpdate", + "azure.mgmt.discovery.operations.ProjectsOperations.begin_update": "Microsoft.Discovery.Projects.update", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.begin_update": "Microsoft.Discovery.Projects.update", + "azure.mgmt.discovery.operations.ProjectsOperations.begin_delete": "Microsoft.Discovery.Projects.delete", + "azure.mgmt.discovery.aio.operations.ProjectsOperations.begin_delete": "Microsoft.Discovery.Projects.delete", + "azure.mgmt.discovery.operations.ProjectsOperations.list_by_workspace": "Microsoft.Discovery.Projects.listByWorkspace", + 
"azure.mgmt.discovery.aio.operations.ProjectsOperations.list_by_workspace": "Microsoft.Discovery.Projects.listByWorkspace", + "azure.mgmt.discovery.operations.WorkspacesOperations.get": "Microsoft.Discovery.Workspaces.get", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.get": "Microsoft.Discovery.Workspaces.get", + "azure.mgmt.discovery.operations.WorkspacesOperations.begin_create_or_update": "Microsoft.Discovery.Workspaces.createOrUpdate", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.begin_create_or_update": "Microsoft.Discovery.Workspaces.createOrUpdate", + "azure.mgmt.discovery.operations.WorkspacesOperations.begin_update": "Microsoft.Discovery.Workspaces.update", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.begin_update": "Microsoft.Discovery.Workspaces.update", + "azure.mgmt.discovery.operations.WorkspacesOperations.begin_delete": "Microsoft.Discovery.Workspaces.delete", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.begin_delete": "Microsoft.Discovery.Workspaces.delete", + "azure.mgmt.discovery.operations.WorkspacesOperations.list_by_resource_group": "Microsoft.Discovery.Workspaces.listByResourceGroup", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.list_by_resource_group": "Microsoft.Discovery.Workspaces.listByResourceGroup", + "azure.mgmt.discovery.operations.WorkspacesOperations.list_by_subscription": "Microsoft.Discovery.Workspaces.listBySubscription", + "azure.mgmt.discovery.aio.operations.WorkspacesOperations.list_by_subscription": "Microsoft.Discovery.Workspaces.listBySubscription", + "azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations.get": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.get", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations.get": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.get", + "azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations.begin_create_or_update": 
"Microsoft.Discovery.WorkspacePrivateEndpointConnections.createOrUpdate", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations.begin_create_or_update": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.createOrUpdate", + "azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations.begin_delete": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.delete", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations.begin_delete": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.delete", + "azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations.list_by_workspace": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.listByWorkspace", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations.list_by_workspace": "Microsoft.Discovery.WorkspacePrivateEndpointConnections.listByWorkspace", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.get": "Microsoft.Discovery.ChatModelDeployments.get", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.get": "Microsoft.Discovery.ChatModelDeployments.get", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.begin_create_or_update": "Microsoft.Discovery.ChatModelDeployments.createOrUpdate", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.begin_create_or_update": "Microsoft.Discovery.ChatModelDeployments.createOrUpdate", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.begin_update": "Microsoft.Discovery.ChatModelDeployments.update", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.begin_update": "Microsoft.Discovery.ChatModelDeployments.update", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.begin_delete": "Microsoft.Discovery.ChatModelDeployments.delete", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.begin_delete": 
"Microsoft.Discovery.ChatModelDeployments.delete", + "azure.mgmt.discovery.operations.ChatModelDeploymentsOperations.list_by_workspace": "Microsoft.Discovery.ChatModelDeployments.listByWorkspace", + "azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations.list_by_workspace": "Microsoft.Discovery.ChatModelDeployments.listByWorkspace", + "azure.mgmt.discovery.operations.WorkspacePrivateLinkResourcesOperations.get": "Microsoft.Discovery.WorkspacePrivateLinkResources.get", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateLinkResourcesOperations.get": "Microsoft.Discovery.WorkspacePrivateLinkResources.get", + "azure.mgmt.discovery.operations.WorkspacePrivateLinkResourcesOperations.list_by_workspace": "Microsoft.Discovery.WorkspacePrivateLinkResources.listByWorkspace", + "azure.mgmt.discovery.aio.operations.WorkspacePrivateLinkResourcesOperations.list_by_workspace": "Microsoft.Discovery.WorkspacePrivateLinkResources.listByWorkspace", + "azure.mgmt.discovery.operations.NodePoolsOperations.get": "Microsoft.Discovery.NodePools.get", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.get": "Microsoft.Discovery.NodePools.get", + "azure.mgmt.discovery.operations.NodePoolsOperations.begin_create_or_update": "Microsoft.Discovery.NodePools.createOrUpdate", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.begin_create_or_update": "Microsoft.Discovery.NodePools.createOrUpdate", + "azure.mgmt.discovery.operations.NodePoolsOperations.begin_update": "Microsoft.Discovery.NodePools.update", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.begin_update": "Microsoft.Discovery.NodePools.update", + "azure.mgmt.discovery.operations.NodePoolsOperations.begin_delete": "Microsoft.Discovery.NodePools.delete", + "azure.mgmt.discovery.aio.operations.NodePoolsOperations.begin_delete": "Microsoft.Discovery.NodePools.delete", + "azure.mgmt.discovery.operations.NodePoolsOperations.list_by_supercomputer": "Microsoft.Discovery.NodePools.listBySupercomputer", 
+ "azure.mgmt.discovery.aio.operations.NodePoolsOperations.list_by_supercomputer": "Microsoft.Discovery.NodePools.listBySupercomputer", + "azure.mgmt.discovery.operations.SupercomputersOperations.get": "Microsoft.Discovery.Supercomputers.get", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.get": "Microsoft.Discovery.Supercomputers.get", + "azure.mgmt.discovery.operations.SupercomputersOperations.begin_create_or_update": "Microsoft.Discovery.Supercomputers.createOrUpdate", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.begin_create_or_update": "Microsoft.Discovery.Supercomputers.createOrUpdate", + "azure.mgmt.discovery.operations.SupercomputersOperations.begin_update": "Microsoft.Discovery.Supercomputers.update", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.begin_update": "Microsoft.Discovery.Supercomputers.update", + "azure.mgmt.discovery.operations.SupercomputersOperations.begin_delete": "Microsoft.Discovery.Supercomputers.delete", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.begin_delete": "Microsoft.Discovery.Supercomputers.delete", + "azure.mgmt.discovery.operations.SupercomputersOperations.list_by_resource_group": "Microsoft.Discovery.Supercomputers.listByResourceGroup", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.list_by_resource_group": "Microsoft.Discovery.Supercomputers.listByResourceGroup", + "azure.mgmt.discovery.operations.SupercomputersOperations.list_by_subscription": "Microsoft.Discovery.Supercomputers.listBySubscription", + "azure.mgmt.discovery.aio.operations.SupercomputersOperations.list_by_subscription": "Microsoft.Discovery.Supercomputers.listBySubscription", + "azure.mgmt.discovery.operations.StorageAssetsOperations.get": "Microsoft.Discovery.StorageAssets.get", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.get": "Microsoft.Discovery.StorageAssets.get", + "azure.mgmt.discovery.operations.StorageAssetsOperations.begin_create_or_update": 
"Microsoft.Discovery.StorageAssets.createOrUpdate", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.begin_create_or_update": "Microsoft.Discovery.StorageAssets.createOrUpdate", + "azure.mgmt.discovery.operations.StorageAssetsOperations.begin_update": "Microsoft.Discovery.StorageAssets.update", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.begin_update": "Microsoft.Discovery.StorageAssets.update", + "azure.mgmt.discovery.operations.StorageAssetsOperations.begin_delete": "Microsoft.Discovery.StorageAssets.delete", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.begin_delete": "Microsoft.Discovery.StorageAssets.delete", + "azure.mgmt.discovery.operations.StorageAssetsOperations.list_by_storage_container": "Microsoft.Discovery.StorageAssets.listByStorageContainer", + "azure.mgmt.discovery.aio.operations.StorageAssetsOperations.list_by_storage_container": "Microsoft.Discovery.StorageAssets.listByStorageContainer", + "azure.mgmt.discovery.operations.StorageContainersOperations.get": "Microsoft.Discovery.StorageContainers.get", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.get": "Microsoft.Discovery.StorageContainers.get", + "azure.mgmt.discovery.operations.StorageContainersOperations.begin_create_or_update": "Microsoft.Discovery.StorageContainers.createOrUpdate", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.begin_create_or_update": "Microsoft.Discovery.StorageContainers.createOrUpdate", + "azure.mgmt.discovery.operations.StorageContainersOperations.begin_update": "Microsoft.Discovery.StorageContainers.update", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.begin_update": "Microsoft.Discovery.StorageContainers.update", + "azure.mgmt.discovery.operations.StorageContainersOperations.begin_delete": "Microsoft.Discovery.StorageContainers.delete", + "azure.mgmt.discovery.aio.operations.StorageContainersOperations.begin_delete": 
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
# pylint: disable=wrong-import-position
#
# Package entry point for azure.mgmt.discovery. Re-exports the generated
# DiscoveryClient and layers in any hand-written customizations from ._patch.

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Type-checking-only wildcard import so static analyzers see any names
    # that ._patch re-exports, without paying the import cost at runtime.
    from ._patch import *  # pylint: disable=unused-wildcard-import

from ._client import DiscoveryClient  # type: ignore
from ._version import VERSION

__version__ = VERSION

try:
    # If ._patch defines an __all__, pull in its public customizations;
    # an ImportError here means the patch module exports nothing extra.
    from ._patch import __all__ as _patch_all
    from ._patch import *
except ImportError:
    _patch_all = []
from ._patch import patch_sdk as _patch_sdk

__all__ = [
    "DiscoveryClient",
]
# Merge patch-provided names into the public API, avoiding duplicates.
__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore

# Give hand-written code a final hook to mutate the generated surface.
_patch_sdk()
# --------------------------------------------------------------------------

from copy import deepcopy
from typing import Any, Optional, TYPE_CHECKING, cast
from typing_extensions import Self

from azure.core.pipeline import policies
from azure.core.rest import HttpRequest, HttpResponse
from azure.core.settings import settings
from azure.mgmt.core import ARMPipelineClient
from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy
from azure.mgmt.core.tools import get_arm_endpoints

from ._configuration import DiscoveryClientConfiguration
from ._utils.serialization import Deserializer, Serializer
from .operations import (
    BookshelfPrivateEndpointConnectionsOperations,
    BookshelfPrivateLinkResourcesOperations,
    BookshelvesOperations,
    ChatModelDeploymentsOperations,
    NodePoolsOperations,
    Operations,
    ProjectsOperations,
    StorageAssetsOperations,
    StorageContainersOperations,
    SupercomputersOperations,
    ToolsOperations,
    WorkspacePrivateEndpointConnectionsOperations,
    WorkspacePrivateLinkResourcesOperations,
    WorkspacesOperations,
)

if TYPE_CHECKING:
    from azure.core import AzureClouds
    from azure.core.credentials import TokenCredential


class DiscoveryClient:  # pylint: disable=too-many-instance-attributes
    """Microsoft.Discovery Resource Provider management API.

    :ivar operations: Operations operations
    :vartype operations: azure.mgmt.discovery.operations.Operations
    :ivar bookshelves: BookshelvesOperations operations
    :vartype bookshelves: azure.mgmt.discovery.operations.BookshelvesOperations
    :ivar bookshelf_private_endpoint_connections: BookshelfPrivateEndpointConnectionsOperations
     operations
    :vartype bookshelf_private_endpoint_connections:
     azure.mgmt.discovery.operations.BookshelfPrivateEndpointConnectionsOperations
    :ivar bookshelf_private_link_resources: BookshelfPrivateLinkResourcesOperations operations
    :vartype bookshelf_private_link_resources:
     azure.mgmt.discovery.operations.BookshelfPrivateLinkResourcesOperations
    :ivar tools: ToolsOperations operations
    :vartype tools: azure.mgmt.discovery.operations.ToolsOperations
    :ivar projects: ProjectsOperations operations
    :vartype projects: azure.mgmt.discovery.operations.ProjectsOperations
    :ivar workspaces: WorkspacesOperations operations
    :vartype workspaces: azure.mgmt.discovery.operations.WorkspacesOperations
    :ivar workspace_private_endpoint_connections: WorkspacePrivateEndpointConnectionsOperations
     operations
    :vartype workspace_private_endpoint_connections:
     azure.mgmt.discovery.operations.WorkspacePrivateEndpointConnectionsOperations
    :ivar chat_model_deployments: ChatModelDeploymentsOperations operations
    :vartype chat_model_deployments: azure.mgmt.discovery.operations.ChatModelDeploymentsOperations
    :ivar workspace_private_link_resources: WorkspacePrivateLinkResourcesOperations operations
    :vartype workspace_private_link_resources:
     azure.mgmt.discovery.operations.WorkspacePrivateLinkResourcesOperations
    :ivar node_pools: NodePoolsOperations operations
    :vartype node_pools: azure.mgmt.discovery.operations.NodePoolsOperations
    :ivar supercomputers: SupercomputersOperations operations
    :vartype supercomputers: azure.mgmt.discovery.operations.SupercomputersOperations
    :ivar storage_assets: StorageAssetsOperations operations
    :vartype storage_assets: azure.mgmt.discovery.operations.StorageAssetsOperations
    :ivar storage_containers: StorageContainersOperations operations
    :vartype storage_containers: azure.mgmt.discovery.operations.StorageContainersOperations
    :param credential: Credential used to authenticate requests to the service. Required.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The ID of the target subscription. The value must be an UUID. Required.
    :type subscription_id: str
    :param base_url: Service host. Default value is None.
    :type base_url: str
    :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is
     None.
    :paramtype cloud_setting: ~azure.core.AzureClouds
    :keyword api_version: The API version to use for this operation. Known values are
     "2026-02-01-preview" and None. Default value is "2026-02-01-preview". Note that overriding this
     default value may result in unsupported behavior.
    :paramtype api_version: str
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    """

    def __init__(
        self,
        credential: "TokenCredential",
        subscription_id: str,
        base_url: Optional[str] = None,
        *,
        cloud_setting: Optional["AzureClouds"] = None,
        **kwargs: Any
    ) -> None:
        # The pipeline client is built against a templated endpoint; the real
        # host is substituted per request (see send_request's format_url call).
        _endpoint = "{endpoint}"
        # Resolve the ARM endpoint and credential scopes from the configured
        # cloud when the caller did not provide an explicit base_url.
        _cloud = cloud_setting or settings.current.azure_cloud  # type: ignore
        _endpoints = get_arm_endpoints(_cloud)
        if not base_url:
            base_url = _endpoints["resource_manager"]
        credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"])
        self._config = DiscoveryClientConfiguration(
            credential=credential,
            subscription_id=subscription_id,
            base_url=cast(str, base_url),
            cloud_setting=cloud_setting,
            credential_scopes=credential_scopes,
            **kwargs
        )

        # Callers may supply a complete `policies` list to replace the default
        # pipeline. NOTE: the order of this list is significant — each entry is
        # a stage in the HTTP pipeline, applied in sequence.
        _policies = kwargs.pop("policies", None)
        if _policies is None:
            _policies = [
                policies.RequestIdPolicy(**kwargs),
                self._config.headers_policy,
                self._config.user_agent_policy,
                self._config.proxy_policy,
                policies.ContentDecodePolicy(**kwargs),
                ARMAutoResourceProviderRegistrationPolicy(),
                self._config.redirect_policy,
                self._config.retry_policy,
                self._config.authentication_policy,
                self._config.custom_hook_policy,
                self._config.logging_policy,
                policies.DistributedTracingPolicy(**kwargs),
                # Only strip sensitive headers on redirect when redirects are enabled.
                policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
                self._config.http_logging_policy,
            ]
        self._client: ARMPipelineClient = ARMPipelineClient(base_url=cast(str, _endpoint), policies=_policies, **kwargs)

        # Serialization helpers shared by every operation group below;
        # client-side validation is disabled for generated clients.
        self._serialize = Serializer()
        self._deserialize = Deserializer()
        self._serialize.client_side_validation = False
        self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
        self.bookshelves = BookshelvesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.bookshelf_private_endpoint_connections = BookshelfPrivateEndpointConnectionsOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.bookshelf_private_link_resources = BookshelfPrivateLinkResourcesOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.tools = ToolsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.projects = ProjectsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.workspace_private_endpoint_connections = WorkspacePrivateEndpointConnectionsOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.chat_model_deployments = ChatModelDeploymentsOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.workspace_private_link_resources = WorkspacePrivateLinkResourcesOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.node_pools = NodePoolsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.supercomputers = SupercomputersOperations(self._client, self._config, self._serialize, self._deserialize)
        self.storage_assets = StorageAssetsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.storage_containers = StorageContainersOperations(
            self._client, self._config, self._serialize, self._deserialize
        )

    def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = client.send_request(request)
        <HttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """

        # Copy so the caller's request object is never mutated, then expand
        # the "{endpoint}" template with the configured ARM base URL.
        request_copy = deepcopy(request)
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
        return self._client.send_request(request_copy, stream=stream, **kwargs)  # type: ignore

    def close(self) -> None:
        # Release the underlying pipeline/transport resources.
        self._client.close()

    def __enter__(self) -> Self:
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details: Any) -> None:
        self._client.__exit__(*exc_details)
class DiscoveryClientConfiguration:  # pylint: disable=too-many-instance-attributes
    """Configuration for DiscoveryClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential used to authenticate requests to the service. Required.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The ID of the target subscription. The value must be an UUID. Required.
    :type subscription_id: str
    :param base_url: Service host. Default value is "https://management.azure.com".
    :type base_url: str
    :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is
     None.
    :type cloud_setting: ~azure.core.AzureClouds
    :keyword api_version: The API version to use for this operation. Known values are
     "2026-02-01-preview" and None. Default value is "2026-02-01-preview". Note that overriding this
     default value may result in unsupported behavior.
    :paramtype api_version: str
    """

    def __init__(
        self,
        credential: "TokenCredential",
        subscription_id: str,
        base_url: str = "https://management.azure.com",
        cloud_setting: Optional["AzureClouds"] = None,
        **kwargs: Any
    ) -> None:
        # Pin the service API version; callers may override via kwargs, but the
        # class docstring warns that other values are unsupported.
        api_version: str = kwargs.pop("api_version", "2026-02-01-preview")

        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")

        self.credential = credential
        self.subscription_id = subscription_id
        self.base_url = base_url
        self.cloud_setting = cloud_setting
        self.api_version = api_version
        # Default AAD scope for public-cloud ARM; DiscoveryClient passes
        # cloud-specific scopes in through kwargs when a cloud is resolved.
        self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
        kwargs.setdefault("sdk_moniker", "mgmt-discovery/{}".format(VERSION))
        self.polling_interval = kwargs.get("polling_interval", 30)
        self._configure(**kwargs)

    def _configure(self, **kwargs: Any) -> None:
        # For each pipeline policy, honor an explicitly supplied instance from
        # kwargs, otherwise construct the azure-core/azure-mgmt-core default.
        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
        self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
        self.authentication_policy = kwargs.get("authentication_policy")
        # Build the default ARM challenge-auth policy only when a credential is
        # present and the caller did not inject their own auth policy.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = ARMChallengeAuthenticationPolicy(
                self.credential, *self.credential_scopes, **kwargs
            )
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------
"""Customize generated code here.

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""


# Names listed here are re-exported by the package __init__, which merges this
# module's __all__ into its own public API.
__all__: list[str] = []  # Add all objects you want publicly available to users at this package level


def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
+# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null +from azure.core.rest import HttpResponse + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") +_NONE_TYPE = type(None) + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted =
o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + 
"spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj # type: ignore[no-any-return] + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. 
+ :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) # type: ignore[no-any-return] + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + 
timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, 
"_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ 
+ return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... 
# pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field(attr_to_rest_field: dict[str, 
"_RestField"], rest_name: str) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if 
prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + 
return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: list[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? 
+ if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
+ if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often the most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() == "dict": + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( +
    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
    value: typing.Any,
):  # pylint: disable=too-many-return-statements
    # Apply a resolved deserializer callable to a raw wire value.
    # XML elements are special-cased first: primitive deserializers read
    # element.text, and empty text maps to None for non-str primitives.
    try:
        if value is None or isinstance(value, _Null):
            return None
        if isinstance(value, ET.Element):
            if deserializer is str:
                return value.text or ""
            if deserializer is int:
                return int(value.text) if value.text else None
            if deserializer is float:
                return float(value.text) if value.text else None
            if deserializer is bool:
                return value.text == "true" if value.text else None
            # format-specific primitive deserializers (dates, bytes, ...) also read text
            if deserializer and deserializer in _DESERIALIZE_MAPPING.values():
                return deserializer(value.text) if value.text else None
            if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values():
                return deserializer(value.text) if value.text else None
        if deserializer is None:
            # no deserializer resolved: pass the raw value through unchanged
            return value
        if deserializer in [int, float, bool]:
            return deserializer(value)
        if isinstance(deserializer, CaseInsensitiveEnumMeta):
            try:
                return deserializer(value.text if isinstance(value, ET.Element) else value)
            except ValueError:
                # for unknown value, return raw value
                return value.text if isinstance(value, ET.Element) else value
        if isinstance(deserializer, type) and issubclass(deserializer, Model):
            return deserializer._deserialize(value, [])
        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
    except Exception as e:
        # normalize any failure into the SDK's DeserializationError, keeping the cause chained
        raise DeserializationError() from e


def _deserialize(
    deserializer: typing.Any,
    value: typing.Any,
    module: typing.Optional[str] = None,
    rf: typing.Optional["_RestField"] = None,
    format: typing.Optional[str] = None,
) -> typing.Any:
    """Deserialize *value* according to a type annotation or deserializer.

    :param deserializer: A type annotation, model class, or pre-built callable.
    :param value: The raw value; a PipelineResponse is unwrapped to its JSON body first.
    :param module: Module name used to resolve string/forward-ref annotations.
    :param rf: Optional rest field carrying format metadata.
    :param format: Shorthand to build a format-only rest field when *rf* is None.
    :returns: The deserialized value.
    """
    if isinstance(value, PipelineResponse):
        value = value.http_response.json()
    if rf is None and format:
        rf = _RestField(format=format)
    # functools.partial means the annotation was already resolved to a callable
    if not isinstance(deserializer, functools.partial):
        deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
    return _deserialize_with_callable(deserializer, value)

def _failsafe_deserialize(
    deserializer: typing.Any,
    response: HttpResponse,
    module: typing.Optional[str] = None,
    rf: typing.Optional["_RestField"] = None,
    format: typing.Optional[str] = None,
) -> typing.Any:
    """Best-effort JSON deserialization of *response*; returns None on any failure.

    Used for error-response bodies where a deserialization failure must not
    mask the original service error.
    """
    try:
        return _deserialize(deserializer, response.json(), module, rf, format)
    except Exception:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
        )
        return None


def _failsafe_deserialize_xml(
    deserializer: typing.Any,
    response: HttpResponse,
) -> typing.Any:
    """XML counterpart of _failsafe_deserialize: parse response.text(), None on failure."""
    try:
        return _deserialize_xml(deserializer, response.text())
    except Exception:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
        )
        return None


class _RestField:
    # Data descriptor mapping a Python attribute to a REST wire property.
    # Instances are created by rest_field()/rest_discriminator() at class
    # definition time; Model machinery fills in _rest_name/_module later.
    def __init__(
        self,
        *,
        name: typing.Optional[str] = None,
        type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
        is_discriminator: bool = False,
        visibility: typing.Optional[list[str]] = None,
        default: typing.Any = _UNSET,
        format: typing.Optional[str] = None,
        is_multipart_file_input: bool = False,
        xml: typing.Optional[dict[str, typing.Any]] = None,
    ):
        self._type = type
        self._rest_name_input = name  # wire name; None until defaulted by the Model metaclass
        self._module: typing.Optional[str] = None  # set later; used to resolve forward refs
        self._is_discriminator = is_discriminator
        self._visibility = visibility
        self._is_model = False  # flipped to True when _type resolves to a Model subclass
        self._default = default
        self._format = format
        self._is_multipart_file_input = is_multipart_file_input
        self._xml = xml if xml is not None else {}

    @property
    def _class_type(self) -> typing.Any:
        # Unwrap the (possibly nested) functools.partial deserializer to the underlying class.
        result = getattr(self._type, "args", [None])[0]
        # type may be wrapped by nested functools.partial so we need to check for that
        if isinstance(result, functools.partial):
            return getattr(result, "args", [None])[0]
        return result

    @property
    def _rest_name(self) -> str:
        if self._rest_name_input is None:
            raise ValueError("Rest name was never set")
        return self._rest_name_input

    def __get__(self, obj: Model, type=None):  # pylint: disable=redefined-builtin
        # by this point, type and rest_name will have a value bc we default
        # them in __new__ of the Model class
        # Use _data.get() directly to avoid triggering __getitem__ which clears the cache
        item = obj._data.get(self._rest_name)
        if item is None:
            return item
        if self._is_model:
            # model-typed values are stored already deserialized (see __set__)
            return item

        # For mutable types, we want mutations to directly affect _data
        # Check if we've already deserialized this value
        cache_attr = f"_deserialized_{self._rest_name}"
        if hasattr(obj, cache_attr):
            # Return the value from _data directly (it's been deserialized in place)
            return obj._data.get(self._rest_name)

        # round-trip through _serialize so format metadata (e.g. base64, dates) is honored
        deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self)

        # For mutable types, store the deserialized value back in _data
        # so mutations directly affect _data
        if isinstance(deserialized, (dict, list, set)):
            obj._data[self._rest_name] = deserialized
            object.__setattr__(obj, cache_attr, True)  # Mark as deserialized
            return deserialized

        return deserialized

    def __set__(self, obj: Model, value) -> None:
        # Clear the cached deserialized object when setting a new value
        cache_attr = f"_deserialized_{self._rest_name}"
        if hasattr(obj, cache_attr):
            object.__delattr__(obj, cache_attr)

        if value is None:
            # we want to wipe out entries if users set attr to None
            try:
                obj.__delitem__(self._rest_name)
            except KeyError:
                pass
            return
        if self._is_model:
            # coerce raw dicts into the model type before storing
            if not _is_model(value):
                value = _deserialize(self._type, value)
            obj.__setitem__(self._rest_name, value)
            return
        # non-model values are stored in wire form
        obj.__setitem__(self._rest_name, _serialize(value, self._format))

    def _get_deserialize_callable_from_annotation(
        self, annotation: typing.Any
    ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
        # thin wrapper binding this field's module and self for forward-ref resolution
        return _get_deserialize_callable_from_annotation(annotation, self._module,
self)


def rest_field(
    *,
    name: typing.Optional[str] = None,
    type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
    visibility: typing.Optional[list[str]] = None,
    default: typing.Any = _UNSET,
    format: typing.Optional[str] = None,
    is_multipart_file_input: bool = False,
    xml: typing.Optional[dict[str, typing.Any]] = None,
) -> typing.Any:
    # Public factory for a _RestField descriptor; used in generated model bodies.
    return _RestField(
        name=name,
        type=type,
        visibility=visibility,
        default=default,
        format=format,
        is_multipart_file_input=is_multipart_file_input,
        xml=xml,
    )


def rest_discriminator(
    *,
    name: typing.Optional[str] = None,
    type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
    visibility: typing.Optional[list[str]] = None,
    xml: typing.Optional[dict[str, typing.Any]] = None,
) -> typing.Any:
    # Factory for the polymorphic-discriminator field of a model hierarchy.
    return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml)


def serialize_xml(model: Model, exclude_readonly: bool = False) -> str:
    """Serialize a model to XML.

    :param Model model: The model to serialize.
    :param bool exclude_readonly: Whether to exclude readonly properties.
    :returns: The XML representation of the model.
    :rtype: str
    """
    return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode")  # type: ignore


def _get_element(
    o: typing.Any,
    exclude_readonly: bool = False,
    parent_meta: typing.Optional[dict[str, typing.Any]] = None,
    wrapped_element: typing.Optional[ET.Element] = None,
) -> typing.Union[ET.Element, list[ET.Element]]:
    # Recursively convert a model/list/dict/primitive into ElementTree element(s),
    # honoring per-property XML metadata (name, ns, prefix, attribute/text/unwrapped).
    if _is_model(o):
        model_meta = getattr(o, "_xml", {})

        # if prop is a model, then use the prop element directly, else generate a wrapper of model
        if wrapped_element is None:
            wrapped_element = _create_xml_element(
                model_meta.get("name", o.__class__.__name__),
                model_meta.get("prefix"),
                model_meta.get("ns"),
            )

        readonly_props = []
        if exclude_readonly:
            readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]

        for k, v in o.items():
            # do not serialize readonly properties
            if exclude_readonly and k in readonly_props:
                continue

            prop_rest_field = _get_rest_field(o._attr_to_rest_field, k)
            if prop_rest_field:
                prop_meta = getattr(prop_rest_field, "_xml").copy()
                # use the wire name as xml name if no specific name is set
                if prop_meta.get("name") is None:
                    prop_meta["name"] = k
            else:
                # additional properties will not have rest field, use the wire name as xml name
                prop_meta = {"name": k}

            # if no ns for prop, use model's
            if prop_meta.get("ns") is None and model_meta.get("ns"):
                prop_meta["ns"] = model_meta.get("ns")
                prop_meta["prefix"] = model_meta.get("prefix")

            if prop_meta.get("unwrapped", False):
                # unwrapped could only set on array
                wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta))
            elif prop_meta.get("text", False):
                # text could only set on primitive type
                wrapped_element.text = _get_primitive_type_value(v)
            elif prop_meta.get("attribute", False):
                xml_name = prop_meta.get("name", k)
                if prop_meta.get("ns"):
                    ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns"))  # pyright: ignore
xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element # type: ignore[no-any-return] + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: 
typing.Optional[str] = None
) -> ET.Element:
    # Create an element, registering and applying the namespace when provided
    # (ElementTree expects the "{ns}tag" qualified-name form).
    if prefix and ns:
        ET.register_namespace(prefix, ns)
    if ns:
        return ET.Element("{" + ns + "}" + tag)
    return ET.Element(tag)


def _deserialize_xml(
    deserializer: typing.Any,
    value: str,
) -> typing.Any:
    # Parse the XML payload then hand the root element to the normal
    # deserialization pipeline (which special-cases ET.Element values).
    element = ET.fromstring(value)  # nosec
    return _deserialize(deserializer, element)


def _convert_element(e: ET.Element):
    """Convert an ElementTree element into plain Python data.

    Elements with attributes or heterogeneous children become dicts (repeated
    child tags collapse into lists); homogeneous children become a list;
    leaves return their text.
    """
    # dict case
    if len(e.attrib) > 0 or len({child.tag for child in e}) > 1:
        dict_result: dict[str, typing.Any] = {}
        for child in e:
            if dict_result.get(child.tag) is not None:
                # repeated tag: promote the existing entry to a list
                if isinstance(dict_result[child.tag], list):
                    dict_result[child.tag].append(_convert_element(child))
                else:
                    dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)]
            else:
                dict_result[child.tag] = _convert_element(child)
        # NOTE(review): attributes merged last, so an attribute shadows a same-named child
        dict_result.update(e.attrib)
        return dict_result
    # array case
    if len(e) > 0:
        array_result: list[typing.Any] = []
        for child in e:
            array_result.append(_convert_element(child))
        return array_result
    # primitive case
    return e.text
diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/serialization.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/serialization.py
new file mode 100644
index 000000000000..81ec1de5922b
--- /dev/null
+++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_utils/serialization.py
@@ -0,0 +1,2041 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. 
+ data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. 
+ :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. 
+ + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. 
Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. 
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, 
target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized |= target_obj.additional_properties + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." 
+ raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. 
+ :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. + :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + 
node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. 
+ + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. 
+ + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." 
+ raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(list[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an 
attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. 
+ + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. 
+ :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
+ if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. 
+ :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. + """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. 
+ :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. 
+ + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. 
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. 
+ """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_version.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py new file mode 100644 index 000000000000..ec0f970e7aa8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import DiscoveryClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "DiscoveryClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py new file mode 100644 index 000000000000..754cf3700ee5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py @@ -0,0 +1,211 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, Optional, TYPE_CHECKING, cast +from typing_extensions import Self + +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.settings import settings +from azure.mgmt.core import AsyncARMPipelineClient +from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy +from azure.mgmt.core.tools import get_arm_endpoints + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import DiscoveryClientConfiguration +from .operations import ( + BookshelfPrivateEndpointConnectionsOperations, + BookshelfPrivateLinkResourcesOperations, + BookshelvesOperations, + ChatModelDeploymentsOperations, + NodePoolsOperations, + Operations, + ProjectsOperations, + StorageAssetsOperations, + StorageContainersOperations, + SupercomputersOperations, + ToolsOperations, + WorkspacePrivateEndpointConnectionsOperations, + WorkspacePrivateLinkResourcesOperations, + WorkspacesOperations, +) + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials_async import AsyncTokenCredential + + +class DiscoveryClient: # pylint: disable=too-many-instance-attributes + """Microsoft.Discovery Resource Provider management API. 
+ + :ivar operations: Operations operations + :vartype operations: azure.mgmt.discovery.aio.operations.Operations + :ivar bookshelves: BookshelvesOperations operations + :vartype bookshelves: azure.mgmt.discovery.aio.operations.BookshelvesOperations + :ivar bookshelf_private_endpoint_connections: BookshelfPrivateEndpointConnectionsOperations + operations + :vartype bookshelf_private_endpoint_connections: + azure.mgmt.discovery.aio.operations.BookshelfPrivateEndpointConnectionsOperations + :ivar bookshelf_private_link_resources: BookshelfPrivateLinkResourcesOperations operations + :vartype bookshelf_private_link_resources: + azure.mgmt.discovery.aio.operations.BookshelfPrivateLinkResourcesOperations + :ivar tools: ToolsOperations operations + :vartype tools: azure.mgmt.discovery.aio.operations.ToolsOperations + :ivar projects: ProjectsOperations operations + :vartype projects: azure.mgmt.discovery.aio.operations.ProjectsOperations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: azure.mgmt.discovery.aio.operations.WorkspacesOperations + :ivar workspace_private_endpoint_connections: WorkspacePrivateEndpointConnectionsOperations + operations + :vartype workspace_private_endpoint_connections: + azure.mgmt.discovery.aio.operations.WorkspacePrivateEndpointConnectionsOperations + :ivar chat_model_deployments: ChatModelDeploymentsOperations operations + :vartype chat_model_deployments: + azure.mgmt.discovery.aio.operations.ChatModelDeploymentsOperations + :ivar workspace_private_link_resources: WorkspacePrivateLinkResourcesOperations operations + :vartype workspace_private_link_resources: + azure.mgmt.discovery.aio.operations.WorkspacePrivateLinkResourcesOperations + :ivar node_pools: NodePoolsOperations operations + :vartype node_pools: azure.mgmt.discovery.aio.operations.NodePoolsOperations + :ivar supercomputers: SupercomputersOperations operations + :vartype supercomputers: azure.mgmt.discovery.aio.operations.SupercomputersOperations + :ivar 
storage_assets: StorageAssetsOperations operations + :vartype storage_assets: azure.mgmt.discovery.aio.operations.StorageAssetsOperations + :ivar storage_containers: StorageContainersOperations operations + :vartype storage_containers: azure.mgmt.discovery.aio.operations.StorageContainersOperations + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is None. + :type base_url: str + :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :paramtype cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are + "2026-02-01-preview" and None. Default value is "2026-02-01-preview". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + *, + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + _endpoint = "{endpoint}" + _cloud = cloud_setting or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) + self._config = DiscoveryClientConfiguration( + credential=credential, + subscription_id=subscription_id, + base_url=cast(str, base_url), + cloud_setting=cloud_setting, + credential_scopes=credential_scopes, + **kwargs + ) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + AsyncARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient( + base_url=cast(str, _endpoint), policies=_policies, **kwargs + ) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.bookshelves = BookshelvesOperations(self._client, self._config, self._serialize, self._deserialize) + self.bookshelf_private_endpoint_connections = BookshelfPrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) 
+ self.bookshelf_private_link_resources = BookshelfPrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.tools = ToolsOperations(self._client, self._config, self._serialize, self._deserialize) + self.projects = ProjectsOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspace_private_endpoint_connections = WorkspacePrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.chat_model_deployments = ChatModelDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_private_link_resources = WorkspacePrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.node_pools = NodePoolsOperations(self._client, self._config, self._serialize, self._deserialize) + self.supercomputers = SupercomputersOperations(self._client, self._config, self._serialize, self._deserialize) + self.storage_assets = StorageAssetsOperations(self._client, self._config, self._serialize, self._deserialize) + self.storage_containers = StorageContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. 
+ :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py new file mode 100644 index 000000000000..7a23d841e7f2 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials_async import AsyncTokenCredential + + +class DiscoveryClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for DiscoveryClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is "https://management.azure.com". + :type base_url: str + :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :type cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are + "2026-02-01-preview" and None. Default value is "2026-02-01-preview". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "2026-02-01-preview") + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.base_url = base_url + self.cloud_setting = cloud_setting + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-discovery/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff 
--git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_patch.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/__init__.py new file mode 100644 index 000000000000..6a22eb1590c8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/__init__.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import Operations # type: ignore +from ._operations import BookshelvesOperations # type: ignore +from ._operations import BookshelfPrivateEndpointConnectionsOperations # type: ignore +from ._operations import BookshelfPrivateLinkResourcesOperations # type: ignore +from ._operations import ToolsOperations # type: ignore +from ._operations import ProjectsOperations # type: ignore +from ._operations import WorkspacesOperations # type: ignore +from ._operations import WorkspacePrivateEndpointConnectionsOperations # type: ignore +from ._operations import ChatModelDeploymentsOperations # type: ignore +from ._operations import WorkspacePrivateLinkResourcesOperations # type: ignore +from ._operations import NodePoolsOperations # type: ignore +from ._operations import SupercomputersOperations # type: ignore +from ._operations import StorageAssetsOperations # type: ignore +from ._operations import StorageContainersOperations # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "Operations", + "BookshelvesOperations", + "BookshelfPrivateEndpointConnectionsOperations", + "BookshelfPrivateLinkResourcesOperations", + "ToolsOperations", + "ProjectsOperations", + "WorkspacesOperations", + "WorkspacePrivateEndpointConnectionsOperations", + "ChatModelDeploymentsOperations", + "WorkspacePrivateLinkResourcesOperations", + "NodePoolsOperations", + "SupercomputersOperations", + "StorageAssetsOperations", + "StorageContainersOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py 
b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py new file mode 100644 index 000000000000..d679affd9ddb --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py @@ -0,0 +1,9323 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import AsyncPipelineClient +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... 
import models as _models +from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from ..._utils.serialization import Deserializer, Serializer +from ...operations._operations import ( + build_bookshelf_private_endpoint_connections_create_or_update_request, + build_bookshelf_private_endpoint_connections_delete_request, + build_bookshelf_private_endpoint_connections_get_request, + build_bookshelf_private_endpoint_connections_list_by_bookshelf_request, + build_bookshelf_private_link_resources_get_request, + build_bookshelf_private_link_resources_list_by_bookshelf_request, + build_bookshelves_create_or_update_request, + build_bookshelves_delete_request, + build_bookshelves_get_request, + build_bookshelves_list_by_resource_group_request, + build_bookshelves_list_by_subscription_request, + build_bookshelves_update_request, + build_chat_model_deployments_create_or_update_request, + build_chat_model_deployments_delete_request, + build_chat_model_deployments_get_request, + build_chat_model_deployments_list_by_workspace_request, + build_chat_model_deployments_update_request, + build_node_pools_create_or_update_request, + build_node_pools_delete_request, + build_node_pools_get_request, + build_node_pools_list_by_supercomputer_request, + build_node_pools_update_request, + build_operations_list_request, + build_projects_create_or_update_request, + build_projects_delete_request, + build_projects_get_request, + build_projects_list_by_workspace_request, + build_projects_update_request, + build_storage_assets_create_or_update_request, + build_storage_assets_delete_request, + build_storage_assets_get_request, + build_storage_assets_list_by_storage_container_request, + build_storage_assets_update_request, + build_storage_containers_create_or_update_request, + build_storage_containers_delete_request, + build_storage_containers_get_request, + build_storage_containers_list_by_resource_group_request, + build_storage_containers_list_by_subscription_request, + 
build_storage_containers_update_request, + build_supercomputers_create_or_update_request, + build_supercomputers_delete_request, + build_supercomputers_get_request, + build_supercomputers_list_by_resource_group_request, + build_supercomputers_list_by_subscription_request, + build_supercomputers_update_request, + build_tools_create_or_update_request, + build_tools_delete_request, + build_tools_get_request, + build_tools_list_by_resource_group_request, + build_tools_list_by_subscription_request, + build_tools_update_request, + build_workspace_private_endpoint_connections_create_or_update_request, + build_workspace_private_endpoint_connections_delete_request, + build_workspace_private_endpoint_connections_get_request, + build_workspace_private_endpoint_connections_list_by_workspace_request, + build_workspace_private_link_resources_get_request, + build_workspace_private_link_resources_list_by_workspace_request, + build_workspaces_create_or_update_request, + build_workspaces_delete_request, + build_workspaces_get_request, + build_workspaces_list_by_resource_group_request, + build_workspaces_list_by_subscription_request, + build_workspaces_update_request, +) +from .._configuration import DiscoveryClientConfiguration + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +JSON = MutableMapping[str, Any] +List = list + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`operations` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Operation"]: + """List the operations for the provider. + + :return: An iterator like instance of Operation + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_operations_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + 
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class BookshelvesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`bookshelves` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> _models.Bookshelf: + """Get a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: Bookshelf. The Bookshelf is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Bookshelf + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + + _request = build_bookshelves_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Bookshelf, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + resource: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelves_create_or_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + 
subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: _models.Bookshelf, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Bookshelf + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Is one of the following types: Bookshelf, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Bookshelf or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Bookshelf. 
The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Bookshelf, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Bookshelf].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
AsyncLROPoller[_models.Bookshelf]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + properties: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelves_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: _models.Bookshelf, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Bookshelf + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. 
The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Bookshelf. 
The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Bookshelf, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Bookshelf or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + 
def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Bookshelf, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Bookshelf].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Bookshelf]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_bookshelves_delete_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Delete a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Bookshelf"]: + """List Bookshelf resources by resource group. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of Bookshelf + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Bookshelf]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelves_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def 
extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Bookshelf], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Bookshelf"]: + """List Bookshelf resources by subscription ID. 
+ + :return: An iterator like instance of Bookshelf + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Bookshelf]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelves_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Bookshelf], + deserialized.get("value", []), + ) + if cls: + 
list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class BookshelfPrivateEndpointConnectionsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`bookshelf_private_endpoint_connections` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.BookshelfPrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: BookshelfPrivateEndpointConnection. The BookshelfPrivateEndpointConnection is + compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BookshelfPrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BookshelfPrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.BookshelfPrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelf_private_endpoint_connections_create_or_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: _models.BookshelfPrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BookshelfPrivateEndpointConnection. 
The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns BookshelfPrivateEndpointConnection. 
The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.BookshelfPrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Is one of the following types: + BookshelfPrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection or JSON or + IO[bytes] + :return: An instance of AsyncLROPoller that returns BookshelfPrivateEndpointConnection. 
The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BookshelfPrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BookshelfPrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return 
AsyncLROPoller[_models.BookshelfPrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.BookshelfPrivateEndpointConnection]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_bookshelf( + self, resource_group_name: str, bookshelf_name: str, **kwargs: 
Any + ) -> AsyncItemPaged["_models.BookshelfPrivateEndpointConnection"]: + """Lists all private endpoint connections for a bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: An iterator like instance of BookshelfPrivateEndpointConnection + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BookshelfPrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelf_private_endpoint_connections_list_by_bookshelf_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BookshelfPrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class BookshelfPrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`bookshelf_private_link_resources` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, bookshelf_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.BookshelfPrivateLinkResource: + """Gets the specified private link resource for the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_link_resource_name: The name of the private link associated with the Azure + resource. Required. + :type private_link_resource_name: str + :return: BookshelfPrivateLinkResource. 
The BookshelfPrivateLinkResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.discovery.models.BookshelfPrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BookshelfPrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_link_resources_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = 
_deserialize(_models.BookshelfPrivateLinkResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_by_bookshelf( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.BookshelfPrivateLinkResource"]: + """Lists all private link resources for the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: An iterator like instance of BookshelfPrivateLinkResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.BookshelfPrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BookshelfPrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelf_private_link_resources_list_by_bookshelf_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = 
case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BookshelfPrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class ToolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`tools` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> _models.Tool: + """Get a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :return: Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Tool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + + _request = build_tools_get_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Tool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, resource_group_name: str, tool_name: str, resource: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_tools_create_or_update_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + 
headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: _models.Tool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. 
+ :type resource: ~azure.mgmt.discovery.models.Tool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, tool_name: str, resource: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Is one of the following types: Tool, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Tool or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Tool. 
The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Tool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Tool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Tool]( + self._client, 
raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, resource_group_name: str, tool_name: str, properties: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_tools_update_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: _models.Tool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Tool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, resource_group_name: str, tool_name: str, properties: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. 
+ :type tool_name: str + :param properties: The resource properties to be updated. Is one of the following types: Tool, + JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Tool or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Tool. The Tool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Tool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if 
cont_token: + return AsyncLROPoller[_models.Tool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Tool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_tools_delete_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + 
response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Delete a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + 
polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Tool"]: + """List Tool resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of Tool + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Tool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_tools_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + 
else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Tool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Tool"]: + """List Tool resources by subscription ID. 
+ + :return: An iterator like instance of Tool + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Tool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_tools_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Tool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = 
                cls(list_of_elem)  # type: ignore
            # nextLink of "" / null both mean "no more pages".
            return deserialized.get("nextLink") or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page (first page when next_link is None).
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponse,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)


class ProjectsOperations:
    """Async operations for the Project resource (get / create / update / delete / list).

    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s
        :attr:`projects` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # The client wires these in positionally; kwargs are a fallback for
        # keyword-style construction. Order: client, config, serializer, deserializer.
        input_args = list(args)
        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace_async
    async def get(
        self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any
    ) -> _models.Project:
        """Get a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :return: Project. The Project is compatible with MutableMapping
        :rtype: ~azure.mgmt.discovery.models.Project
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map well-known status codes to azure-core exception types; note 304 is
        # treated as an error here. Callers may override via kwargs["error_map"].
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional caller-supplied hook that receives (pipeline_response, deserialized, headers).
        cls: ClsType[_models.Project] = kwargs.pop("cls", None)

        _request = build_projects_get_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            project_name=project_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # Substitute the configured ARM endpoint into the request URL template.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            # Streaming callers get raw byte iterators instead of a model.
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.Project, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        resource: Union[_models.Project, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        # Initial PUT of the long-running create/update; the begin_* wrapper polls it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # Raw streams/bytes are sent as-is; models and JSON dicts are serialized
        # (read-only properties are stripped from the payload).
        if isinstance(resource, (IOBase, bytes)):
            _content = resource
        else:
            _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_projects_create_or_update_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            project_name=project_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        # Always stream the initial LRO response; the poller reads the body later.
        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 201:
            # 201 carries the async-operation polling URL and retry hint.
            response_headers["Azure-AsyncOperation"] = self._deserialize(
                "str", response.headers.get("Azure-AsyncOperation")
            )
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        resource: _models.Project,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Project]:
        """Create a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :param resource: Resource create parameters. Required.
        :type resource: ~azure.mgmt.discovery.models.Project
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        resource: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Project]:
        """Create a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :param resource: Resource create parameters. Required.
        :type resource: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        resource: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Project]:
        """Create a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :param resource: Resource create parameters. Required.
        :type resource: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        resource: Union[_models.Project, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Project]:
        """Create a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :param resource: Resource create parameters. Is one of the following types: Project, JSON,
         IO[bytes] Required.
        :type resource: ~azure.mgmt.discovery.models.Project or JSON or IO[bytes]
        :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.Project] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            # cls=lambda x, y, z: x makes the initial call hand back the raw
            # PipelineResponse so the poller can drive the LRO itself.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                project_name=project_name,
                resource=resource,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO into a Project model.
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.Project, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        # polling=True -> default ARM polling; False -> no polling; otherwise a
        # caller-supplied AsyncPollingMethod is used verbatim.
        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Resume a previously started operation instead of issuing a new PUT.
            return AsyncLROPoller[_models.Project].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[_models.Project](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )

    async def _update_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        properties: Union[_models.Project, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        # Initial PATCH of the long-running update; the begin_update wrapper polls it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(properties, (IOBase, bytes)):
            _content = properties
        else:
            _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_projects_update_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            project_name=project_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        # Always stream the initial LRO response; the poller reads the body later.
        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # 202 (accepted, still running) carries the polling Location and retry hint.
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @overload
    async def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        properties: _models.Project,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Project]:
        """Update a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: ~azure.mgmt.discovery.models.Project
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        properties: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Project]:
        """Update a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        properties: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Project]:
        """Update a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        project_name: str,
        properties: Union[_models.Project, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Project]:
        """Update a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :param properties: The resource properties to be updated. Is one of the following types:
         Project, JSON, IO[bytes] Required.
        :type properties: ~azure.mgmt.discovery.models.Project or JSON or IO[bytes]
        :return: An instance of AsyncLROPoller that returns Project. The Project is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.Project] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            # Raw PipelineResponse is returned from the initial call (cls identity
            # lambda) so the poller can drive the LRO itself.
            raw_result = await self._update_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                project_name=project_name,
                properties=properties,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO into a Project model.
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.Project, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Resume a previously started operation instead of issuing a new PATCH.
            return AsyncLROPoller[_models.Project].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[_models.Project](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )

    async def _delete_initial(
        self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        # Initial DELETE of the long-running delete; the begin_delete wrapper polls it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _request = build_projects_delete_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            project_name=project_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        # Always stream the initial LRO response; the poller reads the body later.
        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 202 = accepted (async delete in progress), 204 = already gone/no content.
        if response.status_code not in [202, 204]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @distributed_trace_async
    async def begin_delete(
        self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Delete a Project.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param project_name: The name of the Project. Required.
        :type project_name: str
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                project_name=project_name,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            # Delete produces no body; only invoke the caller's hook if given.
            if cls:
                return cls(pipeline_response, None, {})  # type: ignore

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

    @distributed_trace
    def list_by_workspace(
        self, resource_group_name: str, workspace_name: str, **kwargs: Any
    ) -> AsyncItemPaged["_models.Project"]:
        """List Project resources by Workspace.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :return: An iterator like instance of Project
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Project]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_models.Project]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            if not next_link:

                _request = build_projects_list_by_workspace_request(
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    subscription_id=self._config.subscription_id,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                # (re-quote the nextLink's query parameters and force this
                # client's api-version onto the follow-up request).
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        async def extract_data(pipeline_response):
            # Pull the page's items out of "value" and the continuation out of "nextLink".
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.Project],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponse,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)


class WorkspacesOperations:
    """Async operations for the Workspace resource.

    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s
        :attr:`workspaces` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Same positional wiring as the other operation groups:
        # client, config, serializer, deserializer.
        input_args = list(args)
        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace_async
    async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace:
        """Get a Workspace.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
+ :type workspace_name: str + :return: Workspace. The Workspace is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Workspace + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + + _request = build_workspaces_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Workspace, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # 
type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + resource: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspaces_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. 
The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. 
The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Is one of the following types: Workspace, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Workspace or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def 
get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Workspace, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Workspace].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + properties: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = 
build_workspaces_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Update a Workspace. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Workspace, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Workspace or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Workspace. 
The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Workspace, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Workspace].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
AsyncLROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_workspaces_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Delete a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + 
polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Workspace"]: + """List Workspace resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of Workspace + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Workspace]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspaces_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = 
case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Workspace], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Workspace"]: + """List Workspace resources by subscription ID. 
+ + :return: An iterator like instance of Workspace + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Workspace]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspaces_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Workspace], + deserialized.get("value", []), + ) + if cls: + 
class WorkspacePrivateEndpointConnectionsOperations:  # pylint: disable=name-too-long
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s
    :attr:`workspace_private_endpoint_connections` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Positional args (client, config, serializer, deserializer) come from the
        # generated client; keyword fallbacks support direct construction in tests.
        input_args = list(args)
        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace_async
    async def get(
        self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any
    ) -> _models.WorkspacePrivateEndpointConnection:
        """Gets the specified private endpoint connection associated with the workspace.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection
         associated with the Azure resource. Required.
        :type private_endpoint_connection_name: str
        :return: WorkspacePrivateEndpointConnection. The WorkspacePrivateEndpointConnection is
         compatible with MutableMapping.
        :rtype: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.WorkspacePrivateEndpointConnection] = kwargs.pop("cls", None)

        _request = build_workspace_private_endpoint_connections_get_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            private_endpoint_connection_name=private_endpoint_connection_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.WorkspacePrivateEndpointConnection, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        private_endpoint_connection_name: str,
        resource: Union[_models.WorkspacePrivateEndpointConnection, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        # Raw PUT for the private-endpoint-connection LRO; returns the streamed body
        # so the poller can drive the long-running operation.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(resource, (IOBase, bytes)):
            _content = resource
        else:
            _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_workspace_private_endpoint_connections_create_or_update_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            private_endpoint_connection_name=private_endpoint_connection_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 201:
            response_headers["Azure-AsyncOperation"] = self._deserialize(
                "str", response.headers.get("Azure-AsyncOperation")
            )
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        private_endpoint_connection_name: str,
        resource: _models.WorkspacePrivateEndpointConnection,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]:
        """Approves or updates the specified private endpoint connection.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection
         associated with the Azure resource. Required.
        :type private_endpoint_connection_name: str
        :param resource: Resource create parameters. Required.
        :type resource: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns WorkspacePrivateEndpointConnection. The
         WorkspacePrivateEndpointConnection is compatible with MutableMapping.
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        private_endpoint_connection_name: str,
        resource: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]:
        """Approves or updates the specified private endpoint connection.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection
         associated with the Azure resource. Required.
        :type private_endpoint_connection_name: str
        :param resource: Resource create parameters. Required.
        :type resource: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns WorkspacePrivateEndpointConnection. The
         WorkspacePrivateEndpointConnection is compatible with MutableMapping.
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        private_endpoint_connection_name: str,
        resource: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]:
        """Approves or updates the specified private endpoint connection.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection
         associated with the Azure resource. Required.
        :type private_endpoint_connection_name: str
        :param resource: Resource create parameters. Required.
        :type resource: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns WorkspacePrivateEndpointConnection. The
         WorkspacePrivateEndpointConnection is compatible with MutableMapping.
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    # NOTE(review): the @distributed_trace_async begin_create_or_update implementation
    # continues beyond this chunk of the patch and is not reproduced here.
The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.WorkspacePrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.WorkspacePrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return 
AsyncLROPoller[_models.WorkspacePrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.WorkspacePrivateEndpointConnection]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_workspace_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: 
Any + ) -> AsyncItemPaged["_models.WorkspacePrivateEndpointConnection"]: + """Lists all private endpoint connections for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An iterator like instance of WorkspacePrivateEndpointConnection + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.WorkspacePrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspace_private_endpoint_connections_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.WorkspacePrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class ChatModelDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`chat_model_deployments` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> _models.ChatModelDeployment: + """Get a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :return: ChatModelDeployment. 
The ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.ChatModelDeployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + + _request = build_chat_model_deployments_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + + 
if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_chat_model_deployments_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + 
try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: _models.ChatModelDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.ChatModelDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. 
The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + resource: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Create a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param resource: Resource create parameters. Is one of the following types: + ChatModelDeployment, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.ChatModelDeployment or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. 
The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.ChatModelDeployment].from_continuation_token( + polling_method=polling_method, + 
continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.ChatModelDeployment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_chat_model_deployments_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, 
stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: _models.ChatModelDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.ChatModelDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + chat_model_deployment_name: str, + properties: Union[_models.ChatModelDeployment, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.ChatModelDeployment]: + """Update a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :param properties: The resource properties to be updated. Is one of the following types: + ChatModelDeployment, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.ChatModelDeployment or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns ChatModelDeployment. 
The + ChatModelDeployment is compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.ChatModelDeployment, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.ChatModelDeployment].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, 
+ client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.ChatModelDeployment]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_chat_model_deployments_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + 
response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a ChatModelDeployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param chat_model_deployment_name: The name of the ChatModelDeployment. Required. + :type chat_model_deployment_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + chat_model_deployment_name=chat_model_deployment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: 
disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.ChatModelDeployment"]: + """List ChatModelDeployment resources by Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of ChatModelDeployment + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.ChatModelDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.ChatModelDeployment]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_chat_model_deployments_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.ChatModelDeployment], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class WorkspacePrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`workspace_private_link_resources` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.WorkspacePrivateLinkResource: + """Gets the specified private link resource for the workspace. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_link_resource_name: The name of the private link associated with the Azure + resource. Required. + :type private_link_resource_name: str + :return: WorkspacePrivateLinkResource. The WorkspacePrivateLinkResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.discovery.models.WorkspacePrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.WorkspacePrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_workspace_private_link_resources_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except 
(StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.WorkspacePrivateLinkResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.WorkspacePrivateLinkResource"]: + """Lists all private link resources for the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of WorkspacePrivateLinkResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.WorkspacePrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.WorkspacePrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspace_private_link_resources_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def 
extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.WorkspacePrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class NodePoolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`node_pools` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> _models.NodePool: + """Get a NodePool. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :return: NodePool. The NodePool is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.NodePool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + + _request = build_node_pools_get_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.NodePool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_node_pools_create_or_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # 
pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: _models.NodePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.NodePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. 
+ :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Is one of the following types: NodePool, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.NodePool or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.NodePool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.NodePool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.NodePool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_node_pools_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in 
[200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: _models.NodePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.NodePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. 
+ :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Is one of the following types: + NodePool, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.NodePool or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.NodePool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.NodePool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.NodePool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_node_pools_delete_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + 
response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = 
{ + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_supercomputer( + self, resource_group_name: str, supercomputer_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.NodePool"]: + """List NodePool resources by Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. 
+ :type supercomputer_name: str + :return: An iterator like instance of NodePool + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.NodePool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_node_pools_list_by_supercomputer_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.NodePool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class SupercomputersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`supercomputers` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, supercomputer_name: str, **kwargs: Any) -> _models.Supercomputer: + """Get a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :return: Supercomputer. The Supercomputer is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Supercomputer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + + _request = build_supercomputers_get_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + 
else: + deserialized = _deserialize(_models.Supercomputer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + resource: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_supercomputers_create_or_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await 
response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: _models.Supercomputer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Supercomputer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. 
The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Is one of the following types: Supercomputer, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Supercomputer or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Supercomputer. 
The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Supercomputer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Supercomputer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.Supercomputer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + properties: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_supercomputers_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # 
Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: _models.Supercomputer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Supercomputer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. 
The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns Supercomputer. The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Supercomputer, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Supercomputer or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns Supercomputer. 
The Supercomputer is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Supercomputer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.Supercomputer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) 
+ return AsyncLROPoller[_models.Supercomputer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, supercomputer_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_supercomputers_delete_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + 
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, supercomputer_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, 
AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.Supercomputer"]: + """List Supercomputer resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of Supercomputer + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Supercomputer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_supercomputers_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + 
# make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Supercomputer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.Supercomputer"]: + """List Supercomputer resources by subscription ID. 
+ + :return: An iterator like instance of Supercomputer + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Supercomputer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_supercomputers_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Supercomputer], + deserialized.get("value", 
[]), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class StorageAssetsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :attr:`storage_assets` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> _models.StorageAsset: + """Get a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. 
+ :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :return: StorageAsset. The StorageAsset is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.StorageAsset + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + + _request = build_storage_assets_get_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = 
response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAsset, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_assets_create_or_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, 
stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: _models.StorageAsset, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.StorageAsset + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. 
+ :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Is one of the following types: StorageAsset, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.StorageAsset or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns StorageAsset. 
The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAsset, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.StorageAsset].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.StorageAsset]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_assets_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + 
if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: _models.StorageAsset, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.StorageAsset + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. 
The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is + compatible with MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. 
    @distributed_trace_async
    async def begin_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        storage_asset_name: str,
        properties: Union[_models.StorageAsset, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageAsset]:
        """Update a StorageAsset.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param storage_asset_name: The name of the StorageAsset. Required.
        :type storage_asset_name: str
        :param properties: The resource properties to be updated. Is one of the following types:
         StorageAsset, JSON, IO[bytes] Required.
        :type properties: ~azure.mgmt.discovery.models.StorageAsset or JSON or IO[bytes]
        :return: An instance of AsyncLROPoller that returns StorageAsset. The StorageAsset is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageAsset]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial PATCH when not resuming from a continuation token.
        if cont_token is None:
            raw_result = await self._update_initial(
                resource_group_name=resource_group_name,
                storage_container_name=storage_container_name,
                storage_asset_name=storage_asset_name,
                properties=properties,
                content_type=content_type,
                cls=lambda x, y, z: x,  # pass the raw pipeline response through untouched
                headers=_headers,
                params=_params,
                **kwargs
            )
            # Drain the streamed body so the poller can re-read headers/body safely.
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO into the model type.
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.StorageAsset, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        # polling=True -> standard ARM polling; polling=False -> no polling;
        # otherwise the caller supplied a custom AsyncPollingMethod.
        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[_models.StorageAsset].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[_models.StorageAsset](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )
    async def _delete_initial(
        self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Issue the initial DELETE request of the LRO and return the raw streamed body.

        Accepts 202 (async delete started) and 204 (already gone); any other status
        is mapped through ``error_map`` and raised as :class:`HttpResponseError`.
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _request = build_storage_assets_delete_request(
            resource_group_name=resource_group_name,
            storage_container_name=storage_container_name,
            storage_asset_name=storage_asset_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True  # initial LRO responses are always streamed
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202, 204]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # 202 carries the polling URL and retry hint for the LRO.
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @distributed_trace_async
    async def begin_delete(
        self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Delete a StorageAsset.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param storage_asset_name: The name of the StorageAsset. Required.
        :type storage_asset_name: str
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial DELETE when not resuming from a continuation token.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                storage_container_name=storage_container_name,
                storage_asset_name=storage_asset_name,
                cls=lambda x, y, z: x,  # pass the raw pipeline response through untouched
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            # Delete has no body to deserialize; only honor a custom cls callback.
            if cls:
                return cls(pipeline_response, None, {})  # type: ignore

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method)  # type: ignore
    @distributed_trace
    def list_by_storage_container(
        self, resource_group_name: str, storage_container_name: str, **kwargs: Any
    ) -> AsyncItemPaged["_models.StorageAsset"]:
        """List StorageAsset resources by StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :return: An iterator like instance of StorageAsset
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.StorageAsset]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_models.StorageAsset]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the generated request builder; subsequent pages
            # follow nextLink, re-applying the client's api-version.
            if not next_link:

                _request = build_storage_assets_list_by_storage_container_request(
                    resource_group_name=resource_group_name,
                    storage_container_name=storage_container_name,
                    subscription_id=self._config.subscription_id,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        async def extract_data(pipeline_response):
            # Split one page's JSON into (continuation token, items).
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.StorageAsset],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponse,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)
+ :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :return: StorageContainer. The StorageContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.StorageContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + + _request = build_storage_containers_get_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if 
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        storage_container_name: str,
        resource: Union[_models.StorageContainer, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Issue the initial PUT request of the create-or-update LRO.

        Serializes ``resource`` (model/JSON -> JSON string, IO/bytes passed
        through) and returns the raw streamed response body. 200 means the
        resource already existed; 201 starts an async operation and carries
        the Azure-AsyncOperation polling headers.
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(resource, (IOBase, bytes)):
            _content = resource
        else:
            _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_storage_containers_create_or_update_request(
            resource_group_name=resource_group_name,
            storage_container_name=storage_container_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True  # initial LRO responses are always streamed
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 201:
            response_headers["Azure-AsyncOperation"] = self._deserialize(
                "str", response.headers.get("Azure-AsyncOperation")
            )
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        resource: _models.StorageContainer,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageContainer]:
        """Create a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param resource: Resource create parameters. Required.
        :type resource: ~azure.mgmt.discovery.models.StorageContainer
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        resource: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageContainer]:
        """Create a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param resource: Resource create parameters. Required.
        :type resource: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        resource: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageContainer]:
        """Create a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param resource: Resource create parameters. Required.
        :type resource: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        resource: Union[_models.StorageContainer, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageContainer]:
        """Create a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param resource: Resource create parameters. Is one of the following types: StorageContainer,
         JSON, IO[bytes] Required.
        :type resource: ~azure.mgmt.discovery.models.StorageContainer or JSON or IO[bytes]
        :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial PUT when not resuming from a continuation token.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                storage_container_name=storage_container_name,
                resource=resource,
                content_type=content_type,
                cls=lambda x, y, z: x,  # pass the raw pipeline response through untouched
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO into the model type.
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.StorageContainer, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[_models.StorageContainer].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[_models.StorageContainer](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )
    async def _update_initial(
        self,
        resource_group_name: str,
        storage_container_name: str,
        properties: Union[_models.StorageContainer, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Issue the initial PATCH request of the update LRO.

        Serializes ``properties`` (model/JSON -> JSON string, IO/bytes passed
        through) and returns the raw streamed response body. 200 completes
        synchronously; 202 starts an async operation and carries Location /
        Retry-After polling headers.
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(properties, (IOBase, bytes)):
            _content = properties
        else:
            _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_storage_containers_update_request(
            resource_group_name=resource_group_name,
            storage_container_name=storage_container_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True  # initial LRO responses are always streamed
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @overload
    async def begin_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        properties: _models.StorageContainer,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageContainer]:
        """Update a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: ~azure.mgmt.discovery.models.StorageContainer
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        properties: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageContainer]:
        """Update a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        properties: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageContainer]:
        """Update a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def begin_update(
        self,
        resource_group_name: str,
        storage_container_name: str,
        properties: Union[_models.StorageContainer, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncLROPoller[_models.StorageContainer]:
        """Update a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :param properties: The resource properties to be updated. Is one of the following types:
         StorageContainer, JSON, IO[bytes] Required.
        :type properties: ~azure.mgmt.discovery.models.StorageContainer or JSON or IO[bytes]
        :return: An instance of AsyncLROPoller that returns StorageContainer. The StorageContainer is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.discovery.models.StorageContainer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial PATCH when not resuming from a continuation token.
        if cont_token is None:
            raw_result = await self._update_initial(
                resource_group_name=resource_group_name,
                storage_container_name=storage_container_name,
                properties=properties,
                content_type=content_type,
                cls=lambda x, y, z: x,  # pass the raw pipeline response through untouched
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO into the model type.
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.StorageContainer, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[_models.StorageContainer].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[_models.StorageContainer](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )
    async def _delete_initial(
        self, resource_group_name: str, storage_container_name: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Issue the initial DELETE request of the LRO and return the raw streamed body.

        Accepts 202 (async delete started, with Location/Retry-After polling
        headers) and 204 (already gone); any other status is mapped through
        ``error_map`` and raised as :class:`HttpResponseError`.
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _request = build_storage_containers_delete_request(
            resource_group_name=resource_group_name,
            storage_container_name=storage_container_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True  # initial LRO responses are always streamed
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202, 204]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @distributed_trace_async
    async def begin_delete(
        self, resource_group_name: str, storage_container_name: str, **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Delete a StorageContainer.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param storage_container_name: The name of the StorageContainer. Required.
        :type storage_container_name: str
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial DELETE when not resuming from a continuation token.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                storage_container_name=storage_container_name,
                cls=lambda x, y, z: x,  # pass the raw pipeline response through untouched
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            # Delete has no body to deserialize; only honor a custom cls callback.
            if cls:
                return cls(pipeline_response, None, {})  # type: ignore

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method)  # type: ignore
polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.StorageContainer"]: + """List StorageContainer resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of StorageContainer + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageContainer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_containers_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageContainer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.StorageContainer"]: + """List StorageContainer resources by subscription ID. 
+ + :return: An iterator like instance of StorageContainer + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageContainer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_containers_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageContainer], + 
deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_patch.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/__init__.py new file mode 100644 index 000000000000..5b2204e6df45 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/__init__.py @@ -0,0 +1,148 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + AzureNetAppFilesStore, + AzureStorageBlobStore, + Bookshelf, + BookshelfKeyVaultProperties, + BookshelfPrivateEndpointConnection, + BookshelfPrivateLinkResource, + BookshelfProperties, + ChatModelDeployment, + ChatModelDeploymentProperties, + ErrorAdditionalInfo, + ErrorDetail, + ErrorResponse, + Identity, + KeyVaultProperties, + MoboBrokerResource, + NodePool, + NodePoolProperties, + Operation, + OperationDisplay, + PrivateEndpoint, + PrivateEndpointConnection, + PrivateEndpointConnectionProperties, + PrivateLinkResourceProperties, + PrivateLinkServiceConnectionState, + Project, + ProjectProperties, + ProjectSettings, + ProxyResource, + Resource, + StorageAsset, + StorageAssetProperties, + StorageContainer, + StorageContainerProperties, + StorageStore, + Supercomputer, + 
SupercomputerIdentities, + SupercomputerProperties, + SystemData, + Tool, + ToolProperties, + TrackedResource, + UserAssignedIdentity, + WithMoboBrokerResources, + Workspace, + WorkspacePrivateEndpointConnection, + WorkspacePrivateLinkResource, + WorkspaceProperties, +) + +from ._enums import ( # type: ignore + ActionType, + CreatedByType, + CustomerManagedKeys, + NetworkEgressType, + Origin, + PrivateEndpointConnectionProvisioningState, + PrivateEndpointServiceConnectionStatus, + ProvisioningState, + PublicNetworkAccess, + ScaleSetPriority, + StorageStoreType, + SystemSku, + VmSize, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureNetAppFilesStore", + "AzureStorageBlobStore", + "Bookshelf", + "BookshelfKeyVaultProperties", + "BookshelfPrivateEndpointConnection", + "BookshelfPrivateLinkResource", + "BookshelfProperties", + "ChatModelDeployment", + "ChatModelDeploymentProperties", + "ErrorAdditionalInfo", + "ErrorDetail", + "ErrorResponse", + "Identity", + "KeyVaultProperties", + "MoboBrokerResource", + "NodePool", + "NodePoolProperties", + "Operation", + "OperationDisplay", + "PrivateEndpoint", + "PrivateEndpointConnection", + "PrivateEndpointConnectionProperties", + "PrivateLinkResourceProperties", + "PrivateLinkServiceConnectionState", + "Project", + "ProjectProperties", + "ProjectSettings", + "ProxyResource", + "Resource", + "StorageAsset", + "StorageAssetProperties", + "StorageContainer", + "StorageContainerProperties", + "StorageStore", + "Supercomputer", + "SupercomputerIdentities", + "SupercomputerProperties", + "SystemData", + "Tool", + "ToolProperties", + "TrackedResource", + "UserAssignedIdentity", + "WithMoboBrokerResources", + "Workspace", + "WorkspacePrivateEndpointConnection", + "WorkspacePrivateLinkResource", + "WorkspaceProperties", + "ActionType", + "CreatedByType", + "CustomerManagedKeys", + "NetworkEgressType", + "Origin", + 
"PrivateEndpointConnectionProvisioningState", + "PrivateEndpointServiceConnectionStatus", + "ProvisioningState", + "PublicNetworkAccess", + "ScaleSetPriority", + "StorageStoreType", + "SystemSku", + "VmSize", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_enums.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_enums.py new file mode 100644 index 000000000000..478933ec5c1e --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_enums.py @@ -0,0 +1,179 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class ActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Extensible enum. Indicates the action type. "Internal" refers to actions that are for internal + only APIs. 
+ """ + + INTERNAL = "Internal" + """Actions are for internal-only APIs.""" + + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The kind of entity that created the resource.""" + + USER = "User" + """The entity was created by a user.""" + APPLICATION = "Application" + """The entity was created by an application.""" + MANAGED_IDENTITY = "ManagedIdentity" + """The entity was created by a managed identity.""" + KEY = "Key" + """The entity was created by a key.""" + + +class CustomerManagedKeys(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of customer managed key usage.""" + + ENABLED = "Enabled" + """Customer managed keys are enabled.""" + DISABLED = "Disabled" + """Customer managed keys are disabled.""" + + +class NetworkEgressType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Supported network egress types.""" + + LOAD_BALANCER = "LoadBalancer" + """Public outbound network via load balancer (Default).""" + NONE = "None" + """No default outbound.""" + + +class Origin(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit + logs UX. Default value is "user,system". 
+ """ + + USER = "user" + """Indicates the operation is initiated by a user.""" + SYSTEM = "system" + """Indicates the operation is initiated by a system.""" + USER_SYSTEM = "user,system" + """Indicates the operation is initiated by a user or system.""" + + +class PrivateEndpointConnectionProvisioningState( # pylint: disable=name-too-long + str, Enum, metaclass=CaseInsensitiveEnumMeta +): + """The current provisioning state.""" + + SUCCEEDED = "Succeeded" + """Connection has been provisioned.""" + CREATING = "Creating" + """Connection is being created.""" + DELETING = "Deleting" + """Connection is being deleted.""" + FAILED = "Failed" + """Connection provisioning has failed.""" + + +class PrivateEndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The private endpoint connection status.""" + + PENDING = "Pending" + """Connection waiting for approval or rejection.""" + APPROVED = "Approved" + """Connection approved.""" + REJECTED = "Rejected" + """Connection Rejected.""" + + +class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The resource provisioning state.""" + + SUCCEEDED = "Succeeded" + """Resource has been created.""" + FAILED = "Failed" + """Resource creation failed.""" + CANCELED = "Canceled" + """Resource creation was canceled.""" + ACCEPTED = "Accepted" + """The resource create request has been accepted.""" + PROVISIONING = "Provisioning" + """The resource is being provisioned.""" + UPDATING = "Updating" + """The resource is updating.""" + DELETING = "Deleting" + """The resource is being deleted.""" + + +class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of public network access.""" + + ENABLED = "Enabled" + """Public network access is enabled.""" + DISABLED = "Disabled" + """Public network access is disabled.""" + + +class ScaleSetPriority(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Supported Virtual Machine Scale Set priorities.""" + + REGULAR = "Regular" + 
"""Regular priority Virtual Machine Scale Set.""" + SPOT = "Spot" + """Spot priority Virtual Machine Scale Set.""" + + +class StorageStoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The kind of the backing storage store.""" + + AZURE_STORAGE_BLOB = "AzureStorageBlob" + """The Azure storage blob kind.""" + AZURE_NET_APP_FILES = "AzureNetAppFiles" + """The Azure NetApp Files kind.""" + + +class SystemSku(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Supported System SKU Sizes.""" + + STANDARD_D4_S_V6 = "Standard_D4s_v6" + """Standard_D4s_v6 basic compute VM (default).""" + STANDARD_D4_S_V5 = "Standard_D4s_v5" + """Standard_D4s_v5 SKU.""" + STANDARD_D4_S_V4 = "Standard_D4s_v4" + """Standard_D4s_v4 SKU.""" + + +class VmSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Supported Azure VM Sizes.""" + + STANDARD_NC24_ADS_A100_V4 = "Standard_NC24ads_A100_v4" + """Standard_NC24ads_A100_v4 GPU-optimized Azure VM Size.""" + STANDARD_NC48_ADS_A100_V4 = "Standard_NC48ads_A100_v4" + """Standard_NC48ads_A100_v4 GPU-optimized Azure VM Size.""" + STANDARD_NC96_ADS_A100_V4 = "Standard_NC96ads_A100_v4" + """Standard_NC96ads_A100_v4 GPU-optimized Azure VM Size.""" + STANDARD_NC4_AS_T4_V3 = "Standard_NC4as_T4_v3" + """Standard_NC4as_T4_v3 GPU-optimized Azure VM Size.""" + STANDARD_NC8_AS_T4_V3 = "Standard_NC8as_T4_v3" + """Standard_NC8as_T4_v3 GPU-optimized Azure VM Size.""" + STANDARD_NC16_AS_T4_V3 = "Standard_NC16as_T4_v3" + """Standard_NC16as_T4_v3 GPU-optimized Azure VM Size.""" + STANDARD_NC64_AS_T4_V3 = "Standard_NC64as_T4_v3" + """Standard_NC64as_T4_v3 GPU-optimized Azure VM Size.""" + STANDARD_NV6_ADS_A10_V5 = "Standard_NV6ads_A10_v5" + """Standard_NV6ads_A10_v5 GPU-optimized Azure VM Size.""" + STANDARD_NV12_ADS_A10_V5 = "Standard_NV12ads_A10_v5" + """Standard_NV12ads_A10_v5 GPU-optimized Azure VM Size.""" + STANDARD_NV24_ADS_A10_V5 = "Standard_NV24ads_A10_v5" + """Standard_NV24ads_A10_v5 GPU-optimized Azure VM Size.""" + STANDARD_NV36_ADS_A10_V5 = 
"Standard_NV36ads_A10_v5" + """Standard_NV36ads_A10_v5 GPU-optimized Azure VM Size.""" + STANDARD_NV36_ADMS_A10_V5 = "Standard_NV36adms_A10_v5" + """Standard_NV36adms_A10_v5 GPU-optimized Azure VM Size.""" + STANDARD_NV72_ADS_A10_V5 = "Standard_NV72ads_A10_v5" + """Standard_NV72ads_A10_v5 GPU-optimized Azure VM Size.""" + STANDARD_ND40_RS_V2 = "Standard_ND40rs_v2" + """Standard_ND40rs_v2 GPU-optimized Azure VM Size.""" diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_models.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_models.py new file mode 100644 index 000000000000..79d04a2b71d2 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/models/_models.py @@ -0,0 +1,2095 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +import datetime +from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from .._utils.model_base import Model as _Model, rest_discriminator, rest_field +from ._enums import StorageStoreType + +if TYPE_CHECKING: + from .. import models as _models + + +class StorageStore(_Model): + """An abstract representation of storage store kind. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureNetAppFilesStore, AzureStorageBlobStore + + :ivar kind: The storage store kind. Required. Known values are: "AzureStorageBlob" and + "AzureNetAppFiles".
+ :vartype kind: str or ~azure.mgmt.discovery.models.StorageStoreType + """ + + __mapping__: dict[str, _Model] = {} + kind: str = rest_discriminator(name="kind", visibility=["read", "create"]) + """The storage store kind. Required. Known values are: \"AzureStorageBlob\" and + \"AzureNetAppFiles\".""" + + @overload + def __init__( + self, + *, + kind: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureNetAppFilesStore(StorageStore, discriminator="AzureNetAppFiles"): + """The Azure NetApp Files properties. + + :ivar kind: Azure NetApp Files. Required. The Azure NetApp Files kind. + :vartype kind: str or ~azure.mgmt.discovery.models.AZURE_NET_APP_FILES + :ivar net_app_volume_id: The associated Azure NetApp Files volume ID. Required. + :vartype net_app_volume_id: str + """ + + kind: Literal[StorageStoreType.AZURE_NET_APP_FILES] = rest_discriminator(name="kind", visibility=["read", "create"]) # type: ignore + """Azure NetApp Files. Required. The Azure NetApp Files kind.""" + net_app_volume_id: str = rest_field(name="netAppVolumeId", visibility=["read", "create"]) + """The associated Azure NetApp Files volume ID. Required.""" + + @overload + def __init__( + self, + *, + net_app_volume_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = StorageStoreType.AZURE_NET_APP_FILES # type: ignore + + +class AzureStorageBlobStore(StorageStore, discriminator="AzureStorageBlob"): + """The Azure storage blob properties. + + :ivar kind: Azure Storage Blob. Required. 
The Azure storage blob kind. + :vartype kind: str or ~azure.mgmt.discovery.models.AZURE_STORAGE_BLOB + :ivar storage_account_id: The associated Azure Storage Account ID. Required. + :vartype storage_account_id: str + """ + + kind: Literal[StorageStoreType.AZURE_STORAGE_BLOB] = rest_discriminator(name="kind", visibility=["read", "create"]) # type: ignore + """Azure Storage Blob. Required. The Azure storage blob kind.""" + storage_account_id: str = rest_field(name="storageAccountId", visibility=["read", "create"]) + """The associated Azure Storage Account ID. Required.""" + + @overload + def __init__( + self, + *, + storage_account_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = StorageStoreType.AZURE_STORAGE_BLOB # type: ignore + + +class Resource(_Model): + """Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.""" + name: Optional[str] = rest_field(visibility=["read"]) + """The name of the resource.""" + type: Optional[str] = rest_field(visibility=["read"]) + """The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or + \"Microsoft.Storage/storageAccounts\".""" + system_data: Optional["_models.SystemData"] = rest_field(name="systemData", visibility=["read"]) + """Azure Resource Manager metadata containing createdBy and modifiedBy information.""" + + +class TrackedResource(Resource): + """Tracked Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + """ + + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource tags.""" + location: str = rest_field(visibility=["read", "create"]) + """The geo-location where the resource lives. Required.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Bookshelf(TrackedResource): + """Bookshelf tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.BookshelfProperties + """ + + properties: Optional["_models.BookshelfProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.BookshelfProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BookshelfKeyVaultProperties(_Model): + """Key Vault Properties with clientId selection. + + :ivar key_vault_uri: The Key Vault URI. Required. + :vartype key_vault_uri: str + :ivar key_name: The Key Name in Key Vault. Required. 
+ :vartype key_name: str + :ivar key_version: The Key Version in Key Vault. + :vartype key_version: str + :ivar identity_client_id: The client ID of the identity to use for accessing the Key Vault. + Must be a workload identity assigned to the Bookshelf resource. Required. + :vartype identity_client_id: str + """ + + key_vault_uri: str = rest_field(name="keyVaultUri", visibility=["read", "create"]) + """The Key Vault URI. Required.""" + key_name: str = rest_field(name="keyName", visibility=["read", "create", "update"]) + """The Key Name in Key Vault. Required.""" + key_version: Optional[str] = rest_field(name="keyVersion", visibility=["read", "create", "update"]) + """The Key Version in Key Vault.""" + identity_client_id: str = rest_field(name="identityClientId", visibility=["read", "create"]) + """The client ID of the identity to use for accessing the Key Vault. Must be a workload identity + assigned to the Bookshelf resource. Required.""" + + @overload + def __init__( + self, + *, + key_vault_uri: str, + key_name: str, + identity_client_id: str, + key_version: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProxyResource(Resource): + """Proxy Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. 
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    """


class BookshelfPrivateEndpointConnection(ProxyResource):
    """The Private Endpoint Connection resource for Bookshelf.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    :ivar properties: The resource-specific properties for this resource.
    :vartype properties: ~azure.mgmt.discovery.models.PrivateEndpointConnectionProperties
    """

    properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The resource-specific properties for this resource."""

    @overload
    def __init__(
        self,
        *,
        properties: Optional["_models.PrivateEndpointConnectionProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BookshelfPrivateLinkResource(ProxyResource):
    """A private link resource for Bookshelf.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    :ivar properties: The resource-specific properties for this resource.
    :vartype properties: ~azure.mgmt.discovery.models.PrivateLinkResourceProperties
    """

    properties: Optional["_models.PrivateLinkResourceProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The resource-specific properties for this resource."""

    @overload
    def __init__(
        self,
        *,
        properties: Optional["_models.PrivateLinkResourceProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class BookshelfProperties(_Model):
    """Bookshelf properties.

    :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded",
     "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting".
    :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState
    :ivar workload_identities: User assigned identity IDs to be used by knowledgebase workloads.
     The key value must be the resource ID of the identity resource.
    :vartype workload_identities: dict[str, ~azure.mgmt.discovery.models.UserAssignedIdentity]
    :ivar customer_managed_keys: Whether or not to use a customer managed key when encrypting data
     at rest. Known values are: "Enabled" and "Disabled".
    :vartype customer_managed_keys: str or ~azure.mgmt.discovery.models.CustomerManagedKeys
    :ivar key_vault_properties: The key to use for encrypting data at rest when customer managed
     keys are enabled. Required if Customer Managed Keys is enabled.
    :vartype key_vault_properties: ~azure.mgmt.discovery.models.BookshelfKeyVaultProperties
    :ivar log_analytics_cluster_id: The Log Analytics Cluster to use for debug logs. This is
     required when Customer Managed Keys are enabled.
    :vartype log_analytics_cluster_id: str
    :ivar private_endpoint_connections: List of private endpoint connections.
    :vartype private_endpoint_connections:
     list[~azure.mgmt.discovery.models.PrivateEndpointConnection]
    :ivar public_network_access: Whether or not public network access is allowed for this resource.
     For security reasons, it is recommended to disable it whenever possible. Known values are:
     "Enabled" and "Disabled".
    :vartype public_network_access: str or ~azure.mgmt.discovery.models.PublicNetworkAccess
    :ivar private_endpoint_subnet_id: Private Endpoint Subnet ID for private endpoint connections.
    :vartype private_endpoint_subnet_id: str
    :ivar search_subnet_id: Search Subnet ID for search resources.
    :vartype search_subnet_id: str
    :ivar managed_resource_group: The resource group for resources managed on behalf of customer.
    :vartype managed_resource_group: str
    :ivar managed_on_behalf_of_configuration: Managed-On-Behalf-Of configuration properties. This
     configuration exists for the resources where a resource provider manages those resources on
     behalf of the resource owner.
    :vartype managed_on_behalf_of_configuration:
     ~azure.mgmt.discovery.models.WithMoboBrokerResources
    :ivar bookshelf_uri: The bookshelf data plane API URI.
    :vartype bookshelf_uri: str
    """

    provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field(
        name="provisioningState", visibility=["read"]
    )
    """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\",
     \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\"."""
    workload_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = rest_field(
        name="workloadIdentities", visibility=["read", "create"]
    )
    """User assigned identity IDs to be used by knowledgebase workloads. The key value must be the
     resource ID of the identity resource."""
    customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = rest_field(
        name="customerManagedKeys", visibility=["read", "create"]
    )
    """Whether or not to use a customer managed key when encrypting data at rest. Known values are:
     \"Enabled\" and \"Disabled\"."""
    key_vault_properties: Optional["_models.BookshelfKeyVaultProperties"] = rest_field(
        name="keyVaultProperties", visibility=["read", "create", "update"]
    )
    """The key to use for encrypting data at rest when customer managed keys are enabled. Required if
     Customer Managed Keys is enabled."""
    log_analytics_cluster_id: Optional[str] = rest_field(name="logAnalyticsClusterId", visibility=["read", "create"])
    """The Log Analytics Cluster to use for debug logs. This is required when Customer Managed Keys
     are enabled."""
    private_endpoint_connections: Optional[list["_models.PrivateEndpointConnection"]] = rest_field(
        name="privateEndpointConnections", visibility=["read"]
    )
    """List of private endpoint connections."""
    public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field(
        name="publicNetworkAccess", visibility=["read", "create", "update"]
    )
    """Whether or not public network access is allowed for this resource. For security reasons, it is
     recommended to disable it whenever possible. Known values are: \"Enabled\" and \"Disabled\"."""
    private_endpoint_subnet_id: Optional[str] = rest_field(
        name="privateEndpointSubnetId", visibility=["read", "create"]
    )
    """Private Endpoint Subnet ID for private endpoint connections."""
    search_subnet_id: Optional[str] = rest_field(name="searchSubnetId", visibility=["read", "create"])
    """Search Subnet ID for search resources."""
    managed_resource_group: Optional[str] = rest_field(name="managedResourceGroup", visibility=["read"])
    """The resource group for resources managed on behalf of customer."""
    managed_on_behalf_of_configuration: Optional["_models.WithMoboBrokerResources"] = rest_field(
        name="managedOnBehalfOfConfiguration", visibility=["read"]
    )
    """Managed-On-Behalf-Of configuration properties. This configuration exists for the resources
     where a resource provider manages those resources on behalf of the resource owner."""
    bookshelf_uri: Optional[str] = rest_field(name="bookshelfUri", visibility=["read"])
    """The bookshelf data plane API URI."""

    @overload
    def __init__(
        self,
        *,
        workload_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = None,
        customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = None,
        key_vault_properties: Optional["_models.BookshelfKeyVaultProperties"] = None,
        log_analytics_cluster_id: Optional[str] = None,
        public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None,
        private_endpoint_subnet_id: Optional[str] = None,
        search_subnet_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ChatModelDeployment(TrackedResource):
    """Represents a deployment that ties a specific model family to a user defined deployment name
    used when invoking the chat model.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar location: The geo-location where the resource lives. Required.
    :vartype location: str
    :ivar properties: The resource-specific properties for this resource.
    :vartype properties: ~azure.mgmt.discovery.models.ChatModelDeploymentProperties
    """

    properties: Optional["_models.ChatModelDeploymentProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The resource-specific properties for this resource."""

    @overload
    def __init__(
        self,
        *,
        location: str,
        tags: Optional[dict[str, str]] = None,
        properties: Optional["_models.ChatModelDeploymentProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ChatModelDeploymentProperties(_Model):
    """Defines a deployment binding a specific model family to a user-defined deployment name for chat
    inference.

    :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded",
     "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting".
    :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState
    :ivar model_format: Model format as published by the provider. Verify supported formats per
     region using the Model Catalog API. Required.
    :vartype model_format: str
    :ivar model_name: Canonical provider model name available in the selected region. Verify
     supported values per region using the Model Catalog API. Required.
    :vartype model_name: str
    """

    provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field(
        name="provisioningState", visibility=["read"]
    )
    """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\",
     \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\"."""
    model_format: str = rest_field(name="modelFormat", visibility=["read", "create"])
    """Model format as published by the provider. Verify supported formats per region using the Model
     Catalog API. Required."""
    model_name: str = rest_field(name="modelName", visibility=["read", "create"])
    """Canonical provider model name available in the selected region. Verify supported values per
     region using the Model Catalog API. Required."""

    @overload
    def __init__(
        self,
        *,
        model_format: str,
        model_name: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ErrorAdditionalInfo(_Model):
    """The resource management error additional info.

    :ivar type: The additional info type.
    :vartype type: str
    :ivar info: The additional info.
    :vartype info: any
    """

    type: Optional[str] = rest_field(visibility=["read"])
    """The additional info type."""
    info: Optional[Any] = rest_field(visibility=["read"])
    """The additional info."""


class ErrorDetail(_Model):
    """The error detail.

    :ivar code: The error code.
    :vartype code: str
    :ivar message: The error message.
    :vartype message: str
    :ivar target: The error target.
    :vartype target: str
    :ivar details: The error details.
    :vartype details: list[~azure.mgmt.discovery.models.ErrorDetail]
    :ivar additional_info: The error additional info.
    :vartype additional_info: list[~azure.mgmt.discovery.models.ErrorAdditionalInfo]
    """

    code: Optional[str] = rest_field(visibility=["read"])
    """The error code."""
    message: Optional[str] = rest_field(visibility=["read"])
    """The error message."""
    target: Optional[str] = rest_field(visibility=["read"])
    """The error target."""
    details: Optional[list["_models.ErrorDetail"]] = rest_field(visibility=["read"])
    """The error details."""
    additional_info: Optional[list["_models.ErrorAdditionalInfo"]] = rest_field(
        name="additionalInfo", visibility=["read"]
    )
    """The error additional info."""


class ErrorResponse(_Model):
    """Error response.

    :ivar error: The error object.
    :vartype error: ~azure.mgmt.discovery.models.ErrorDetail
    """

    error: Optional["_models.ErrorDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The error object."""

    @overload
    def __init__(
        self,
        *,
        error: Optional["_models.ErrorDetail"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Identity(_Model):
    """For user assigned identity resource property.

    :ivar id: The resource ID of the user assigned identity. Required.
    :vartype id: str
    :ivar principal_id: The principal ID of the assigned identity.
    :vartype principal_id: str
    :ivar client_id: The client ID of the assigned identity.
    :vartype client_id: str
    """

    id: str = rest_field(visibility=["read", "create", "update"])
    """The resource ID of the user assigned identity. Required."""
    principal_id: Optional[str] = rest_field(name="principalId", visibility=["read"])
    """The principal ID of the assigned identity."""
    client_id: Optional[str] = rest_field(name="clientId", visibility=["read"])
    """The client ID of the assigned identity."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class KeyVaultProperties(_Model):
    """For Key Vault Key references.

    :ivar key_vault_uri: The Key Vault URI. Required.
    :vartype key_vault_uri: str
    :ivar key_name: The Key Name in Key Vault. Required.
    :vartype key_name: str
    :ivar key_version: The Key Version in Key Vault.
    :vartype key_version: str
    """

    key_vault_uri: str = rest_field(name="keyVaultUri", visibility=["read", "create"])
    """The Key Vault URI. Required."""
    key_name: str = rest_field(name="keyName", visibility=["read", "create", "update"])
    """The Key Name in Key Vault. Required."""
    key_version: Optional[str] = rest_field(name="keyVersion", visibility=["read", "create", "update"])
    """The Key Version in Key Vault."""

    @overload
    def __init__(
        self,
        *,
        key_vault_uri: str,
        key_name: str,
        key_version: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class MoboBrokerResource(_Model):
    """Managed-On-Behalf-Of broker resource. This resource is created by the Resource Provider to
    manage some resources on behalf of the user.

    :ivar id: Resource identifier of a Managed-On-Behalf-Of broker resource.
    :vartype id: str
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Resource identifier of a Managed-On-Behalf-Of broker resource."""

    @overload
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NodePool(TrackedResource):
    """NodePool tracked resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar location: The geo-location where the resource lives. Required.
    :vartype location: str
    :ivar properties: The resource-specific properties for this resource.
    :vartype properties: ~azure.mgmt.discovery.models.NodePoolProperties
    """

    properties: Optional["_models.NodePoolProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The resource-specific properties for this resource."""

    @overload
    def __init__(
        self,
        *,
        location: str,
        tags: Optional[dict[str, str]] = None,
        properties: Optional["_models.NodePoolProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class NodePoolProperties(_Model):
    """NodePool properties.

    :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded",
     "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting".
    :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState
    :ivar subnet_id: The node pool subnet. Required.
    :vartype subnet_id: str
    :ivar vm_size: The size of the underlying Azure VM. Required. Known values are:
     "Standard_NC24ads_A100_v4", "Standard_NC48ads_A100_v4", "Standard_NC96ads_A100_v4",
     "Standard_NC4as_T4_v3", "Standard_NC8as_T4_v3", "Standard_NC16as_T4_v3",
     "Standard_NC64as_T4_v3", "Standard_NV6ads_A10_v5", "Standard_NV12ads_A10_v5",
     "Standard_NV24ads_A10_v5", "Standard_NV36ads_A10_v5", "Standard_NV36adms_A10_v5",
     "Standard_NV72ads_A10_v5", and "Standard_ND40rs_v2".
    :vartype vm_size: str or ~azure.mgmt.discovery.models.VmSize
    :ivar max_node_count: The maximum number of nodes. Required.
    :vartype max_node_count: int
    :ivar min_node_count: The minimum number of nodes.
    :vartype min_node_count: int
    :ivar scale_set_priority: The Virtual Machine Scale Set priority. If not specified, the default
     is 'Regular'. Known values are: "Regular" and "Spot".
    :vartype scale_set_priority: str or ~azure.mgmt.discovery.models.ScaleSetPriority
    """

    provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field(
        name="provisioningState", visibility=["read"]
    )
    """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\",
     \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\"."""
    subnet_id: str = rest_field(name="subnetId", visibility=["read", "create"])
    """The node pool subnet. Required."""
    vm_size: Union[str, "_models.VmSize"] = rest_field(name="vmSize", visibility=["read", "create"])
    """The size of the underlying Azure VM. Required. Known values are: \"Standard_NC24ads_A100_v4\",
     \"Standard_NC48ads_A100_v4\", \"Standard_NC96ads_A100_v4\", \"Standard_NC4as_T4_v3\",
     \"Standard_NC8as_T4_v3\", \"Standard_NC16as_T4_v3\", \"Standard_NC64as_T4_v3\",
     \"Standard_NV6ads_A10_v5\", \"Standard_NV12ads_A10_v5\", \"Standard_NV24ads_A10_v5\",
     \"Standard_NV36ads_A10_v5\", \"Standard_NV36adms_A10_v5\", \"Standard_NV72ads_A10_v5\", and
     \"Standard_ND40rs_v2\"."""
    max_node_count: int = rest_field(name="maxNodeCount", visibility=["read", "create", "update"])
    """The maximum number of nodes. Required."""
    min_node_count: Optional[int] = rest_field(name="minNodeCount", visibility=["read", "create", "update"])
    """The minimum number of nodes."""
    scale_set_priority: Optional[Union[str, "_models.ScaleSetPriority"]] = rest_field(
        name="scaleSetPriority", visibility=["read", "create"]
    )
    """The Virtual Machine Scale Set priority. If not specified, the default is 'Regular'. Known
     values are: \"Regular\" and \"Spot\"."""

    @overload
    def __init__(
        self,
        *,
        subnet_id: str,
        vm_size: Union[str, "_models.VmSize"],
        max_node_count: int,
        min_node_count: Optional[int] = None,
        scale_set_priority: Optional[Union[str, "_models.ScaleSetPriority"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Operation(_Model):
    """REST API Operation.

    :ivar name: The name of the operation, as per Resource-Based Access Control (RBAC). Examples:
     "Microsoft.Compute/virtualMachines/write", "Microsoft.Compute/virtualMachines/capture/action".
    :vartype name: str
    :ivar is_data_action: Whether the operation applies to data-plane. This is "true" for
     data-plane operations and "false" for Azure Resource Manager/control-plane operations.
    :vartype is_data_action: bool
    :ivar display: Localized display information for this particular operation.
    :vartype display: ~azure.mgmt.discovery.models.OperationDisplay
    :ivar origin: The intended executor of the operation; as in Resource Based Access Control
     (RBAC) and audit logs UX. Default value is "user,system". Known values are: "user", "system",
     and "user,system".
    :vartype origin: str or ~azure.mgmt.discovery.models.Origin
    :ivar action_type: Extensible enum. Indicates the action type. "Internal" refers to actions
     that are for internal only APIs. "Internal"
    :vartype action_type: str or ~azure.mgmt.discovery.models.ActionType
    """

    name: Optional[str] = rest_field(visibility=["read"])
    """The name of the operation, as per Resource-Based Access Control (RBAC). Examples:
     \"Microsoft.Compute/virtualMachines/write\",
     \"Microsoft.Compute/virtualMachines/capture/action\"."""
    is_data_action: Optional[bool] = rest_field(name="isDataAction", visibility=["read"])
    """Whether the operation applies to data-plane. This is \"true\" for data-plane operations and
     \"false\" for Azure Resource Manager/control-plane operations."""
    display: Optional["_models.OperationDisplay"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Localized display information for this particular operation."""
    origin: Optional[Union[str, "_models.Origin"]] = rest_field(visibility=["read"])
    """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit
     logs UX. Default value is \"user,system\". Known values are: \"user\", \"system\", and
     \"user,system\"."""
    action_type: Optional[Union[str, "_models.ActionType"]] = rest_field(name="actionType", visibility=["read"])
    """Extensible enum. Indicates the action type. \"Internal\" refers to actions that are for
     internal only APIs. \"Internal\""""

    @overload
    def __init__(
        self,
        *,
        display: Optional["_models.OperationDisplay"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class OperationDisplay(_Model):
    """Localized display information for an operation.

    :ivar provider: The localized friendly form of the resource provider name, e.g. "Microsoft
     Monitoring Insights" or "Microsoft Compute".
    :vartype provider: str
    :ivar resource: The localized friendly name of the resource type related to this operation.
     E.g. "Virtual Machines" or "Job Schedule Collections".
    :vartype resource: str
    :ivar operation: The concise, localized friendly name for the operation; suitable for
     dropdowns. E.g. "Create or Update Virtual Machine", "Restart Virtual Machine".
    :vartype operation: str
    :ivar description: The short, localized friendly description of the operation; suitable for
     tool tips and detailed views.
    :vartype description: str
    """

    provider: Optional[str] = rest_field(visibility=["read"])
    """The localized friendly form of the resource provider name, e.g. \"Microsoft Monitoring
     Insights\" or \"Microsoft Compute\"."""
    resource: Optional[str] = rest_field(visibility=["read"])
    """The localized friendly name of the resource type related to this operation. E.g. \"Virtual
     Machines\" or \"Job Schedule Collections\"."""
    operation: Optional[str] = rest_field(visibility=["read"])
    """The concise, localized friendly name for the operation; suitable for dropdowns. E.g. \"Create
     or Update Virtual Machine\", \"Restart Virtual Machine\"."""
    description: Optional[str] = rest_field(visibility=["read"])
    """The short, localized friendly description of the operation; suitable for tool tips and detailed
     views."""


class PrivateEndpoint(_Model):
    """The private endpoint resource.

    :ivar id: The resource identifier of the private endpoint.
    :vartype id: str
    """

    id: Optional[str] = rest_field(visibility=["read"])
    """The resource identifier of the private endpoint."""


class PrivateEndpointConnection(Resource):
    """The private endpoint connection resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    :ivar properties: The private endpoint connection properties.
    :vartype properties: ~azure.mgmt.discovery.models.PrivateEndpointConnectionProperties
    """

    properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The private endpoint connection properties."""

    @overload
    def __init__(
        self,
        *,
        properties: Optional["_models.PrivateEndpointConnectionProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class PrivateEndpointConnectionProperties(_Model):
    """Properties of the private endpoint connection.

    :ivar group_ids: The group ids for the private endpoint resource.
    :vartype group_ids: list[str]
    :ivar private_endpoint: The private endpoint resource.
    :vartype private_endpoint: ~azure.mgmt.discovery.models.PrivateEndpoint
    :ivar private_link_service_connection_state: A collection of information about the state of the
     connection between service consumer and provider. Required.
    :vartype private_link_service_connection_state:
     ~azure.mgmt.discovery.models.PrivateLinkServiceConnectionState
    :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
     Known values are: "Succeeded", "Creating", "Deleting", and "Failed".
    :vartype provisioning_state: str or
     ~azure.mgmt.discovery.models.PrivateEndpointConnectionProvisioningState
    """

    group_ids: Optional[list[str]] = rest_field(name="groupIds", visibility=["read"])
    """The group ids for the private endpoint resource."""
    private_endpoint: Optional["_models.PrivateEndpoint"] = rest_field(
        name="privateEndpoint", visibility=["read", "create", "update", "delete", "query"]
    )
    """The private endpoint resource."""
    private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState" = rest_field(
        name="privateLinkServiceConnectionState", visibility=["read", "create", "update", "delete", "query"]
    )
    """A collection of information about the state of the connection between service consumer and
     provider. Required."""
    provisioning_state: Optional[Union[str, "_models.PrivateEndpointConnectionProvisioningState"]] = rest_field(
        name="provisioningState", visibility=["read"]
    )
    """The provisioning state of the private endpoint connection resource. Known values are:
     \"Succeeded\", \"Creating\", \"Deleting\", and \"Failed\"."""

    @overload
    def __init__(
        self,
        *,
        private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState",
        private_endpoint: Optional["_models.PrivateEndpoint"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class PrivateLinkResourceProperties(_Model):
    """Properties of a private link resource.

    :ivar group_id: The private link resource group id.
    :vartype group_id: str
    :ivar required_members: The private link resource required member names.
    :vartype required_members: list[str]
    :ivar required_zone_names: The private link resource private link DNS zone name.
    :vartype required_zone_names: list[str]
    """

    group_id: Optional[str] = rest_field(name="groupId", visibility=["read"])
    """The private link resource group id."""
    required_members: Optional[list[str]] = rest_field(name="requiredMembers", visibility=["read"])
    """The private link resource required member names."""
    required_zone_names: Optional[list[str]] = rest_field(
        name="requiredZoneNames", visibility=["read", "create", "update", "delete", "query"]
    )
    """The private link resource private link DNS zone name."""

    @overload
    def __init__(
        self,
        *,
        required_zone_names: Optional[list[str]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class PrivateLinkServiceConnectionState(_Model):
    """A collection of information about the state of the connection between service consumer and
    provider.

    :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
     of the service. Known values are: "Pending", "Approved", and "Rejected".
    :vartype status: str or ~azure.mgmt.discovery.models.PrivateEndpointServiceConnectionStatus
    :ivar description: The reason for approval/rejection of the connection.
    :vartype description: str
    :ivar actions_required: A message indicating if changes on the service provider require any
     updates on the consumer.
    :vartype actions_required: str
    """

    status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Indicates whether the connection has been Approved/Rejected/Removed by the owner of the
     service. Known values are: \"Pending\", \"Approved\", and \"Rejected\"."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The reason for approval/rejection of the connection."""
    actions_required: Optional[str] = rest_field(
        name="actionsRequired", visibility=["read", "create", "update", "delete", "query"]
    )
    """A message indicating if changes on the service provider require any updates on the consumer."""

    @overload
    def __init__(
        self,
        *,
        status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None,
        description: Optional[str] = None,
        actions_required: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Project(TrackedResource):
    """Project tracked resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar location: The geo-location where the resource lives. Required.
    :vartype location: str
    :ivar properties: The resource-specific properties for this resource.
    :vartype properties: ~azure.mgmt.discovery.models.ProjectProperties
    """

    properties: Optional["_models.ProjectProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The resource-specific properties for this resource."""

    @overload
    def __init__(
        self,
        *,
        location: str,
        tags: Optional[dict[str, str]] = None,
        properties: Optional["_models.ProjectProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ProjectProperties(_Model):
    """Project properties.

    :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded",
     "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting".
    :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState
    :ivar foundry_project_endpoint: Foundry project endpoint URI.
    :vartype foundry_project_endpoint: str
    :ivar storage_container_ids: Allowed StorageContainers (Control plane resource references).
    :vartype storage_container_ids: list[str]
    :ivar settings: Settings for the project.
    :vartype settings: ~azure.mgmt.discovery.models.ProjectSettings
    """

    provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field(
        name="provisioningState", visibility=["read"]
    )
    """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\",
     \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\"."""
    foundry_project_endpoint: Optional[str] = rest_field(name="foundryProjectEndpoint", visibility=["read"])
    """Foundry project endpoint URI."""
    storage_container_ids: Optional[list[str]] = rest_field(name="storageContainerIds", visibility=["read", "create"])
    """Allowed StorageContainers (Control plane resource references)."""
    settings: Optional["_models.ProjectSettings"] = rest_field(visibility=["read", "create", "update"])
    """Settings for the project."""

    @overload
    def __init__(
        self,
        *,
        storage_container_ids: Optional[list[str]] = None,
        settings: Optional["_models.ProjectSettings"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ProjectSettings(_Model):
    """Settings schema for the project.

    :ivar behavior_preferences: Default preferences to guide AI behaviors in this project.
    :vartype behavior_preferences: str
    """

    behavior_preferences: Optional[str] = rest_field(
        name="behaviorPreferences", visibility=["read", "create", "update", "delete", "query"]
    )
    """Default preferences to guide AI behaviors in this project."""

    @overload
    def __init__(
        self,
        *,
        behavior_preferences: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class StorageAsset(TrackedResource):
    """Storage Asset tracked resource.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar location: The geo-location where the resource lives. Required.
    :vartype location: str
    :ivar properties: The resource-specific properties for this resource.
    :vartype properties: ~azure.mgmt.discovery.models.StorageAssetProperties
    """

    properties: Optional["_models.StorageAssetProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The resource-specific properties for this resource."""

    @overload
    def __init__(
        self,
        *,
        location: str,
        tags: Optional[dict[str, str]] = None,
        properties: Optional["_models.StorageAssetProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class StorageAssetProperties(_Model):
    """Storage Asset properties.

    :ivar description: The description. Required.
    :vartype description: str
    :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded",
     "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting".
    :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState
    :ivar path: The path to the data within its parent container.
This should be relative to the + root of the parent container. + :vartype path: str + """ + + description: str = rest_field(visibility=["read", "create", "update"]) + """The description. Required.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + path: Optional[str] = rest_field(visibility=["read", "create"]) + """The path to the data within its parent container. This should be relative to the root of the + parent container.""" + + @overload + def __init__( + self, + *, + description: str, + path: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageContainer(TrackedResource): + """Storage Container tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. 
+ :vartype properties: ~azure.mgmt.discovery.models.StorageContainerProperties + """ + + properties: Optional["_models.StorageContainerProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.StorageContainerProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageContainerProperties(_Model): + """Storage Container properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar storage_store: Storage store properties. Required. + :vartype storage_store: ~azure.mgmt.discovery.models.StorageStore + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + storage_store: "_models.StorageStore" = rest_field(name="storageStore", visibility=["read", "create"]) + """Storage store properties. Required.""" + + @overload + def __init__( + self, + *, + storage_store: "_models.StorageStore", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Supercomputer(TrackedResource): + """Supercomputer tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.SupercomputerProperties + """ + + properties: Optional["_models.SupercomputerProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.SupercomputerProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SupercomputerIdentities(_Model): + """Dictionary of identity properties for the Supercomputer. + + :ivar cluster_identity: Cluster identity ID. Required. 
+ :vartype cluster_identity: ~azure.mgmt.discovery.models.Identity + :ivar kubelet_identity: Kubelet identity ID used by the supercomputer. This identity is used by + the supercomputer at node level to access Azure resources. This identity must have + ManagedIdentityOperator role on the clusterIdentity. Required. + :vartype kubelet_identity: ~azure.mgmt.discovery.models.Identity + :ivar workload_identities: User assigned identity IDs to be used by workloads as federated + credentials running on supercomputer. The key value must be the resource ID of the identity + resource. + :vartype workload_identities: dict[str, ~azure.mgmt.discovery.models.UserAssignedIdentity] + """ + + cluster_identity: "_models.Identity" = rest_field(name="clusterIdentity", visibility=["read", "create"]) + """Cluster identity ID. Required.""" + kubelet_identity: "_models.Identity" = rest_field(name="kubeletIdentity", visibility=["read", "create"]) + """Kubelet identity ID used by the supercomputer. This identity is used by the supercomputer at + node level to access Azure resources. This identity must have ManagedIdentityOperator role on + the clusterIdentity. Required.""" + workload_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = rest_field( + name="workloadIdentities", visibility=["read", "create", "update"] + ) + """User assigned identity IDs to be used by workloads as federated credentials running on + supercomputer. The key value must be the resource ID of the identity resource.""" + + @overload + def __init__( + self, + *, + cluster_identity: "_models.Identity", + kubelet_identity: "_models.Identity", + workload_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SupercomputerProperties(_Model): + """Supercomputer properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar subnet_id: System Subnet ID associated with managed NodePool for system resources. It + should have connectivity to the child NodePool subnets. Required. + :vartype subnet_id: str + :ivar management_subnet_id: System Subnet ID associated with AKS apiserver. Must be delegated + to Microsoft.ContainerService/managedClusters. It should have connectivity to the system subnet + and nodepool subnets. + :vartype management_subnet_id: str + :ivar outbound_type: Network egress type provisioned for the supercomputer workloads. Defaults + to LoadBalancer if not specified. If None is specified, the customer is responsible for + providing outbound connectivity for Supercomputer functionality. Known values are: + "LoadBalancer" and "None". + :vartype outbound_type: str or ~azure.mgmt.discovery.models.NetworkEgressType + :ivar system_sku: The SKU to use for the system node pool. Known values are: "Standard_D4s_v6", + "Standard_D4s_v5", and "Standard_D4s_v4". + :vartype system_sku: str or ~azure.mgmt.discovery.models.SystemSku + :ivar identities: Dictionary of identity properties. Required. + :vartype identities: ~azure.mgmt.discovery.models.SupercomputerIdentities + :ivar customer_managed_keys: Whether or not to use a customer managed key when encrypting data + at rest. Known values are: "Enabled" and "Disabled". + :vartype customer_managed_keys: str or ~azure.mgmt.discovery.models.CustomerManagedKeys + :ivar disk_encryption_set_id: Disk Encryption Set ID to use for Customer Managed Keys + encryption. 
Required if Customer Managed Keys is enabled. + :vartype disk_encryption_set_id: str + :ivar log_analytics_cluster_id: The Log Analytics Cluster to use for debug logs. This is + required when Customer Managed Keys are enabled. + :vartype log_analytics_cluster_id: str + :ivar managed_resource_group: The resource group for resources managed on behalf of customer. + :vartype managed_resource_group: str + :ivar managed_on_behalf_of_configuration: Managed-On-Behalf-Of configuration properties. This + configuration exists for the resources where a resource provider manages those resources on + behalf of the resource owner. + :vartype managed_on_behalf_of_configuration: + ~azure.mgmt.discovery.models.WithMoboBrokerResources + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + subnet_id: str = rest_field(name="subnetId", visibility=["read", "create"]) + """System Subnet ID associated with managed NodePool for system resources. It should have + connectivity to the child NodePool subnets. Required.""" + management_subnet_id: Optional[str] = rest_field(name="managementSubnetId", visibility=["read", "create"]) + """System Subnet ID associated with AKS apiserver. Must be delegated to + Microsoft.ContainerService/managedClusters. It should have connectivity to the system subnet + and nodepool subnets.""" + outbound_type: Optional[Union[str, "_models.NetworkEgressType"]] = rest_field( + name="outboundType", visibility=["read", "create"] + ) + """Network egress type provisioned for the supercomputer workloads. Defaults to LoadBalancer if + not specified. If None is specified, the customer is responsible for providing outbound + connectivity for Supercomputer functionality. 
Known values are: \"LoadBalancer\" and \"None\".""" + system_sku: Optional[Union[str, "_models.SystemSku"]] = rest_field(name="systemSku", visibility=["read", "create"]) + """The SKU to use for the system node pool. Known values are: \"Standard_D4s_v6\", + \"Standard_D4s_v5\", and \"Standard_D4s_v4\".""" + identities: "_models.SupercomputerIdentities" = rest_field(visibility=["read", "create", "update"]) + """Dictionary of identity properties. Required.""" + customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = rest_field( + name="customerManagedKeys", visibility=["read", "create"] + ) + """Whether or not to use a customer managed key when encrypting data at rest. Known values are: + \"Enabled\" and \"Disabled\".""" + disk_encryption_set_id: Optional[str] = rest_field(name="diskEncryptionSetId", visibility=["read", "create"]) + """Disk Encryption Set ID to use for Customer Managed Keys encryption. Required if Customer + Managed Keys is enabled.""" + log_analytics_cluster_id: Optional[str] = rest_field(name="logAnalyticsClusterId", visibility=["read", "create"]) + """The Log Analytics Cluster to use for debug logs. This is required when Customer Managed Keys + are enabled.""" + managed_resource_group: Optional[str] = rest_field(name="managedResourceGroup", visibility=["read"]) + """The resource group for resources managed on behalf of customer.""" + managed_on_behalf_of_configuration: Optional["_models.WithMoboBrokerResources"] = rest_field( + name="managedOnBehalfOfConfiguration", visibility=["read"] + ) + """Managed-On-Behalf-Of configuration properties. 
This configuration exists for the resources + where a resource provider manages those resources on behalf of the resource owner.""" + + @overload + def __init__( + self, + *, + subnet_id: str, + identities: "_models.SupercomputerIdentities", + management_subnet_id: Optional[str] = None, + outbound_type: Optional[Union[str, "_models.NetworkEgressType"]] = None, + system_sku: Optional[Union[str, "_models.SystemSku"]] = None, + customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = None, + disk_encryption_set_id: Optional[str] = None, + log_analytics_cluster_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SystemData(_Model): + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.mgmt.discovery.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.discovery.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
+ :vartype last_modified_at: ~datetime.datetime + """ + + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read", "create", "update", "delete", "query"]) + """The identity that created the resource.""" + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="createdByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of identity that created the resource. Known values are: \"User\", \"Application\", + \"ManagedIdentity\", and \"Key\".""" + created_at: Optional[datetime.datetime] = rest_field( + name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp of resource creation (UTC).""" + last_modified_by: Optional[str] = rest_field( + name="lastModifiedBy", visibility=["read", "create", "update", "delete", "query"] + ) + """The identity that last modified the resource.""" + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="lastModifiedByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of identity that last modified the resource. Known values are: \"User\", + \"Application\", \"ManagedIdentity\", and \"Key\".""" + last_modified_at: Optional[datetime.datetime] = rest_field( + name="lastModifiedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp of resource last modification (UTC).""" + + @overload + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Tool(TrackedResource): + """Tool tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.ToolProperties + """ + + properties: Optional["_models.ToolProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.ToolProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolProperties(_Model): + """Discovery Tool list item properties. + + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting". 
+ :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState + :ivar version: The version of a resource definition. Required. + :vartype version: str + :ivar environment_variables: Environment variables to make available. + :vartype environment_variables: dict[str, str] + :ivar definition_content: The JSON content for defining a resource. Required. + :vartype definition_content: dict[str, any] + """ + + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\".""" + version: str = rest_field(visibility=["read", "create", "update"]) + """The version of a resource definition. Required.""" + environment_variables: Optional[dict[str, str]] = rest_field( + name="environmentVariables", visibility=["read", "create", "update"] + ) + """Environment variables to make available.""" + definition_content: dict[str, Any] = rest_field(name="definitionContent", visibility=["read", "create", "update"]) + """The JSON content for defining a resource. Required.""" + + @overload + def __init__( + self, + *, + version: str, + definition_content: dict[str, Any], + environment_variables: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class UserAssignedIdentity(_Model): + """User assigned identity properties. + + :ivar principal_id: The principal ID of the assigned identity. + :vartype principal_id: str + :ivar client_id: The client ID of the assigned identity. 
+ :vartype client_id: str + """ + + principal_id: Optional[str] = rest_field(name="principalId", visibility=["read"]) + """The principal ID of the assigned identity.""" + client_id: Optional[str] = rest_field(name="clientId", visibility=["read"]) + """The client ID of the assigned identity.""" + + +class WithMoboBrokerResources(_Model): + """For tracking mobo resources. + + :ivar mobo_broker_resources: Managed-On-Behalf-Of broker resources. + :vartype mobo_broker_resources: list[~azure.mgmt.discovery.models.MoboBrokerResource] + """ + + mobo_broker_resources: Optional[list["_models.MoboBrokerResource"]] = rest_field( + name="moboBrokerResources", visibility=["read"] + ) + """Managed-On-Behalf-Of broker resources.""" + + +class Workspace(TrackedResource): + """Workspace tracked resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. 
+ :vartype properties: ~azure.mgmt.discovery.models.WorkspaceProperties + """ + + properties: Optional["_models.WorkspaceProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.WorkspaceProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WorkspacePrivateEndpointConnection(ProxyResource): + """The Private Endpoint Connection resource for Workspace. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.discovery.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.discovery.models.PrivateEndpointConnectionProperties + """ + + properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateEndpointConnectionProperties"] = None, + ) -> None: ... 
class WorkspacePrivateLinkResource(ProxyResource):
    """A private link resource scoped to a Workspace.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~azure.mgmt.discovery.models.SystemData
    :ivar properties: The resource-specific properties for this resource.
    :vartype properties: ~azure.mgmt.discovery.models.PrivateLinkResourceProperties
    """

    properties: Optional["_models.PrivateLinkResourceProperties"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The resource-specific properties for this resource."""

    @overload
    def __init__(
        self,
        *,
        properties: Optional["_models.PrivateLinkResourceProperties"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Both overloads funnel into the generated model base initializer.
        super().__init__(*args, **kwargs)


class WorkspaceProperties(_Model):
    """Workspace properties.

    :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded",
     "Failed", "Canceled", "Accepted", "Provisioning", "Updating", and "Deleting".
    :vartype provisioning_state: str or ~azure.mgmt.discovery.models.ProvisioningState
    :ivar supercomputer_ids: List of linked SuperComputers.
    :vartype supercomputer_ids: list[str]
    :ivar workspace_api_uri: workspace API endpoint Uri.
    :vartype workspace_api_uri: str
    :ivar workspace_ui_uri: workspace User Interface Uri.
    :vartype workspace_ui_uri: str
    :ivar workspace_identity: Identity IDs used for leveraging Workspace resources. Required.
    :vartype workspace_identity: ~azure.mgmt.discovery.models.Identity
    :ivar customer_managed_keys: Whether or not to use a customer managed key when encrypting data
     at rest. Known values are: "Enabled" and "Disabled".
    :vartype customer_managed_keys: str or ~azure.mgmt.discovery.models.CustomerManagedKeys
    :ivar key_vault_properties: The key to use for encrypting data at rest when customer managed
     keys are enabled.
    :vartype key_vault_properties: ~azure.mgmt.discovery.models.KeyVaultProperties
    :ivar log_analytics_cluster_id: The Log Analytics Cluster to use for debug logs. This is
     required when Customer Managed Keys are enabled.
    :vartype log_analytics_cluster_id: str
    :ivar private_endpoint_connections: List of private endpoint connections.
    :vartype private_endpoint_connections:
     list[~azure.mgmt.discovery.models.PrivateEndpointConnection]
    :ivar public_network_access: Whether or not public network access is allowed for this resource.
     For security reasons, it is recommended to disable it whenever possible. Known values are:
     "Enabled" and "Disabled".
    :vartype public_network_access: str or ~azure.mgmt.discovery.models.PublicNetworkAccess
    :ivar agent_subnet_id: Agent Subnet ID for agent resources.
    :vartype agent_subnet_id: str
    :ivar private_endpoint_subnet_id: Private Endpoint Subnet ID for private endpoint connections.
    :vartype private_endpoint_subnet_id: str
    :ivar workspace_subnet_id: Function Subnet ID for workspace resources.
    :vartype workspace_subnet_id: str
    :ivar managed_resource_group: The resource group for resources managed on behalf of customer.
    :vartype managed_resource_group: str
    :ivar managed_on_behalf_of_configuration: Managed-On-Behalf-Of configuration properties. This
     configuration exists for the resources where a resource provider manages those resources on
     behalf of the resource owner.
    :vartype managed_on_behalf_of_configuration:
     ~azure.mgmt.discovery.models.WithMoboBrokerResources
    """

    # NOTE: the rest_field wire names and visibility lists below are the service contract;
    # they must not be altered without a matching API change.
    provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field(
        name="provisioningState", visibility=["read"]
    )
    """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\",
    \"Accepted\", \"Provisioning\", \"Updating\", and \"Deleting\"."""
    supercomputer_ids: Optional[list[str]] = rest_field(
        name="supercomputerIds", visibility=["read", "create", "update"]
    )
    """List of linked SuperComputers."""
    workspace_api_uri: Optional[str] = rest_field(name="workspaceApiUri", visibility=["read"])
    """workspace API endpoint Uri."""
    workspace_ui_uri: Optional[str] = rest_field(name="workspaceUiUri", visibility=["read"])
    """workspace User Interface Uri."""
    workspace_identity: "_models.Identity" = rest_field(name="workspaceIdentity", visibility=["read", "create"])
    """Identity IDs used for leveraging Workspace resources. Required."""
    customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = rest_field(
        name="customerManagedKeys", visibility=["read", "create"]
    )
    """Whether or not to use a customer managed key when encrypting data at rest. Known values are:
    \"Enabled\" and \"Disabled\"."""
    key_vault_properties: Optional["_models.KeyVaultProperties"] = rest_field(
        name="keyVaultProperties", visibility=["read", "create", "update"]
    )
    """The key to use for encrypting data at rest when customer managed keys are enabled."""
    log_analytics_cluster_id: Optional[str] = rest_field(name="logAnalyticsClusterId", visibility=["read", "create"])
    """The Log Analytics Cluster to use for debug logs. This is required when Customer Managed Keys
    are enabled."""
    private_endpoint_connections: Optional[list["_models.PrivateEndpointConnection"]] = rest_field(
        name="privateEndpointConnections", visibility=["read"]
    )
    """List of private endpoint connections."""
    public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field(
        name="publicNetworkAccess", visibility=["read", "create", "update"]
    )
    """Whether or not public network access is allowed for this resource. For security reasons, it is
    recommended to disable it whenever possible. Known values are: \"Enabled\" and \"Disabled\"."""
    agent_subnet_id: Optional[str] = rest_field(name="agentSubnetId", visibility=["read", "create"])
    """Agent Subnet ID for agent resources."""
    private_endpoint_subnet_id: Optional[str] = rest_field(
        name="privateEndpointSubnetId", visibility=["read", "create"]
    )
    """Private Endpoint Subnet ID for private endpoint connections."""
    workspace_subnet_id: Optional[str] = rest_field(name="workspaceSubnetId", visibility=["read", "create"])
    """Function Subnet ID for workspace resources."""
    managed_resource_group: Optional[str] = rest_field(name="managedResourceGroup", visibility=["read"])
    """The resource group for resources managed on behalf of customer."""
    managed_on_behalf_of_configuration: Optional["_models.WithMoboBrokerResources"] = rest_field(
        name="managedOnBehalfOfConfiguration", visibility=["read"]
    )
    """Managed-On-Behalf-Of configuration properties. This configuration exists for the resources
    where a resource provider manages those resources on behalf of the resource owner."""

    @overload
    def __init__(
        self,
        *,
        workspace_identity: "_models.Identity",
        supercomputer_ids: Optional[list[str]] = None,
        customer_managed_keys: Optional[Union[str, "_models.CustomerManagedKeys"]] = None,
        key_vault_properties: Optional["_models.KeyVaultProperties"] = None,
        log_analytics_cluster_id: Optional[str] = None,
        public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None,
        agent_subnet_id: Optional[str] = None,
        private_endpoint_subnet_id: Optional[str] = None,
        workspace_subnet_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Both overloads funnel into the generated model base initializer.
        super().__init__(*args, **kwargs)
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
# pylint: disable=wrong-import-position

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from ._patch import *  # pylint: disable=unused-wildcard-import

# Single grouped import of the generated operation classes (equivalent to the
# one-per-line form the generator emits).
from ._operations import (  # type: ignore
    Operations,
    BookshelvesOperations,
    BookshelfPrivateEndpointConnectionsOperations,
    BookshelfPrivateLinkResourcesOperations,
    ToolsOperations,
    ProjectsOperations,
    WorkspacesOperations,
    WorkspacePrivateEndpointConnectionsOperations,
    ChatModelDeploymentsOperations,
    WorkspacePrivateLinkResourcesOperations,
    NodePoolsOperations,
    SupercomputersOperations,
    StorageAssetsOperations,
    StorageContainersOperations,
)

from ._patch import __all__ as _patch_all
from ._patch import *
from ._patch import patch_sdk as _patch_sdk

# Public surface of this subpackage; hand-written patch symbols are appended below.
__all__ = [
    "Operations",
    "BookshelvesOperations",
    "BookshelfPrivateEndpointConnectionsOperations",
    "BookshelfPrivateLinkResourcesOperations",
    "ToolsOperations",
    "ProjectsOperations",
    "WorkspacesOperations",
    "WorkspacePrivateEndpointConnectionsOperations",
    "ChatModelDeploymentsOperations",
    "WorkspacePrivateLinkResourcesOperations",
    "NodePoolsOperations",
    "SupercomputersOperations",
    "StorageAssetsOperations",
    "StorageContainersOperations",
]
__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
_patch_sdk()
# --------------------------------------------------------------------------
from collections.abc import MutableMapping
from io import IOBase
import json
from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse

from azure.core import PipelineClient
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
    StreamClosedError,
    StreamConsumedError,
    map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling

from .. import models as _models
from .._configuration import DiscoveryClientConfiguration
from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize
from .._utils.serialization import Deserializer, Serializer

T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]]
JSON = MutableMapping[str, Any]
List = list

_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False

# Default service API version used when the caller supplies none.
_DEFAULT_API_VERSION = "2026-02-01-preview"

# URL templates shared by the request builders below.
_BOOKSHELF_URL = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves/{bookshelfName}"
_BOOKSHELVES_RG_URL = (
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/bookshelves"
)
_BOOKSHELVES_SUB_URL = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/bookshelves"
_BOOKSHELF_PEC_URL = _BOOKSHELF_URL + "/privateEndpointConnections/{privateEndpointConnectionName}"
_BOOKSHELF_PEC_LIST_URL = _BOOKSHELF_URL + "/privateEndpointConnections"
_BOOKSHELF_PLR_URL = _BOOKSHELF_URL + "/privateLinkResources/{privateLinkResourceName}"
_BOOKSHELF_PLR_LIST_URL = _BOOKSHELF_URL + "/privateLinkResources"
_TOOL_URL = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/tools/{toolName}"
_TOOLS_RG_URL = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/tools"
_TOOLS_SUB_URL = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/tools"


def _build_discovery_request(
    method: str,
    url_template: str,
    path_values: Optional[dict[str, tuple[str, str]]] = None,
    *,
    with_body: bool = False,
    **kwargs: Any,
) -> HttpRequest:
    """Assemble an :class:`~azure.core.rest.HttpRequest` for the Discovery RP.

    Shared by every ``build_*_request`` helper in this module; mirrors the
    per-function code the generator would otherwise emit.

    :param method: HTTP verb ("GET", "PUT", "PATCH", "DELETE").
    :type method: str
    :param url_template: Relative URL, possibly containing ``{placeholders}``.
    :type url_template: str
    :param path_values: Maps each URL placeholder to a (python parameter name,
     value) pair; the python name is only used for serializer error reporting.
    :type path_values: dict[str, tuple[str, str]] or None
    :keyword with_body: When True, a ``content_type`` keyword/header is honored
     and forwarded as the Content-Type header (PUT/PATCH builders).
    :paramtype with_body: bool
    :return: the assembled request; remaining ``kwargs`` flow through to it.
    :rtype: ~azure.core.rest.HttpRequest
    """
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    _headers: Optional[MutableMapping[str, Any]] = None
    content_type: Optional[str] = None
    accept = "application/json"
    if method != "DELETE":
        # DELETE builders neither consume a "headers" kwarg nor set
        # Accept/Content-Type, matching the generated originals.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        if with_body:
            content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        accept = _headers.pop("Accept", "application/json")
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", _DEFAULT_API_VERSION))

    _url = url_template
    if path_values:
        _url = _url.format(
            **{key: _SERIALIZER.url(py_name, value, "str") for key, (py_name, value) in path_values.items()}
        )

    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    if _headers is None:
        return HttpRequest(method=method, url=_url, params=_params, **kwargs)
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method=method, url=_url, params=_params, headers=_headers, **kwargs)


def _rg_path(subscription_id: str, resource_group_name: str) -> dict[str, tuple[str, str]]:
    """Path values for subscription + resource-group scoped URLs."""
    return {
        "subscriptionId": ("subscription_id", subscription_id),
        "resourceGroupName": ("resource_group_name", resource_group_name),
    }


def _bookshelf_path(subscription_id: str, resource_group_name: str, bookshelf_name: str) -> dict[str, tuple[str, str]]:
    """Path values for bookshelf-scoped URLs."""
    path = _rg_path(subscription_id, resource_group_name)
    path["bookshelfName"] = ("bookshelf_name", bookshelf_name)
    return path


def build_operations_list_request(**kwargs: Any) -> HttpRequest:
    """List the operations exposed by the Microsoft.Discovery provider."""
    return _build_discovery_request("GET", "/providers/Microsoft.Discovery/operations", **kwargs)


def build_bookshelves_get_request(
    resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Get a single bookshelf."""
    return _build_discovery_request(
        "GET", _BOOKSHELF_URL, _bookshelf_path(subscription_id, resource_group_name, bookshelf_name), **kwargs
    )


def build_bookshelves_create_or_update_request(  # pylint: disable=name-too-long
    resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Create or replace a bookshelf (PUT, carries a body)."""
    return _build_discovery_request(
        "PUT",
        _BOOKSHELF_URL,
        _bookshelf_path(subscription_id, resource_group_name, bookshelf_name),
        with_body=True,
        **kwargs,
    )


def build_bookshelves_update_request(
    resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Patch a bookshelf (PATCH, carries a body)."""
    return _build_discovery_request(
        "PATCH",
        _BOOKSHELF_URL,
        _bookshelf_path(subscription_id, resource_group_name, bookshelf_name),
        with_body=True,
        **kwargs,
    )


def build_bookshelves_delete_request(
    resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Delete a bookshelf."""
    return _build_discovery_request(
        "DELETE", _BOOKSHELF_URL, _bookshelf_path(subscription_id, resource_group_name, bookshelf_name), **kwargs
    )


def build_bookshelves_list_by_resource_group_request(  # pylint: disable=name-too-long
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """List bookshelves in a resource group."""
    return _build_discovery_request(
        "GET", _BOOKSHELVES_RG_URL, _rg_path(subscription_id, resource_group_name), **kwargs
    )


def build_bookshelves_list_by_subscription_request(  # pylint: disable=name-too-long
    subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """List bookshelves in a subscription."""
    return _build_discovery_request(
        "GET", _BOOKSHELVES_SUB_URL, {"subscriptionId": ("subscription_id", subscription_id)}, **kwargs
    )


def build_bookshelf_private_endpoint_connections_get_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    bookshelf_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Get a private endpoint connection on a bookshelf."""
    path = _bookshelf_path(subscription_id, resource_group_name, bookshelf_name)
    path["privateEndpointConnectionName"] = ("private_endpoint_connection_name", private_endpoint_connection_name)
    return _build_discovery_request("GET", _BOOKSHELF_PEC_URL, path, **kwargs)


def build_bookshelf_private_endpoint_connections_create_or_update_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    bookshelf_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Create or replace a private endpoint connection on a bookshelf."""
    path = _bookshelf_path(subscription_id, resource_group_name, bookshelf_name)
    path["privateEndpointConnectionName"] = ("private_endpoint_connection_name", private_endpoint_connection_name)
    return _build_discovery_request("PUT", _BOOKSHELF_PEC_URL, path, with_body=True, **kwargs)


def build_bookshelf_private_endpoint_connections_delete_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    bookshelf_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Delete a private endpoint connection on a bookshelf."""
    path = _bookshelf_path(subscription_id, resource_group_name, bookshelf_name)
    path["privateEndpointConnectionName"] = ("private_endpoint_connection_name", private_endpoint_connection_name)
    return _build_discovery_request("DELETE", _BOOKSHELF_PEC_URL, path, **kwargs)


def build_bookshelf_private_endpoint_connections_list_by_bookshelf_request(  # pylint: disable=name-too-long
    resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """List private endpoint connections on a bookshelf."""
    return _build_discovery_request(
        "GET",
        _BOOKSHELF_PEC_LIST_URL,
        _bookshelf_path(subscription_id, resource_group_name, bookshelf_name),
        **kwargs,
    )


def build_bookshelf_private_link_resources_get_request(  # pylint: disable=name-too-long
    resource_group_name: str, bookshelf_name: str, private_link_resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Get a private link resource on a bookshelf."""
    path = _bookshelf_path(subscription_id, resource_group_name, bookshelf_name)
    path["privateLinkResourceName"] = ("private_link_resource_name", private_link_resource_name)
    return _build_discovery_request("GET", _BOOKSHELF_PLR_URL, path, **kwargs)


def build_bookshelf_private_link_resources_list_by_bookshelf_request(  # pylint: disable=name-too-long
    resource_group_name: str, bookshelf_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """List private link resources on a bookshelf."""
    return _build_discovery_request(
        "GET",
        _BOOKSHELF_PLR_LIST_URL,
        _bookshelf_path(subscription_id, resource_group_name, bookshelf_name),
        **kwargs,
    )


def _tool_path(subscription_id: str, resource_group_name: str, tool_name: str) -> dict[str, tuple[str, str]]:
    """Path values for tool-scoped URLs."""
    path = _rg_path(subscription_id, resource_group_name)
    path["toolName"] = ("tool_name", tool_name)
    return path


def build_tools_get_request(
    resource_group_name: str, tool_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Get a single tool."""
    return _build_discovery_request(
        "GET", _TOOL_URL, _tool_path(subscription_id, resource_group_name, tool_name), **kwargs
    )


def build_tools_create_or_update_request(
    resource_group_name: str, tool_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Create or replace a tool (PUT, carries a body)."""
    return _build_discovery_request(
        "PUT", _TOOL_URL, _tool_path(subscription_id, resource_group_name, tool_name), with_body=True, **kwargs
    )


def build_tools_update_request(
    resource_group_name: str, tool_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Patch a tool (PATCH, carries a body)."""
    return _build_discovery_request(
        "PATCH", _TOOL_URL, _tool_path(subscription_id, resource_group_name, tool_name), with_body=True, **kwargs
    )


def build_tools_delete_request(
    resource_group_name: str, tool_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Delete a tool."""
    return _build_discovery_request(
        "DELETE", _TOOL_URL, _tool_path(subscription_id, resource_group_name, tool_name), **kwargs
    )


def build_tools_list_by_resource_group_request(  # pylint: disable=name-too-long
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """List tools in a resource group."""
    return _build_discovery_request("GET", _TOOLS_RG_URL, _rg_path(subscription_id, resource_group_name), **kwargs)


def build_tools_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
    """List tools in a subscription."""
    return _build_discovery_request(
        "GET", _TOOLS_SUB_URL, {"subscriptionId": ("subscription_id", subscription_id)}, **kwargs
    )
{ + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_projects_create_or_update_request( + resource_group_name: str, workspace_name: str, project_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects/{projectName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + 
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_projects_update_request( + resource_group_name: str, workspace_name: str, project_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects/{projectName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_projects_delete_request( + resource_group_name: str, workspace_name: str, project_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects/{projectName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_projects_list_by_workspace_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/projects" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # 
Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspaces_get_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspaces_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
    }
    # NOTE(review): the string above completes the `_url = ...` assignment begun
    # in the previous chunk (build_workspaces_create_or_update_request).

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_workspaces_update_request(
    resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PATCH request for the Workspaces 'update' operation."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    # Content-Type is only sent when a body content type was actually supplied.
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)


def build_workspaces_delete_request(
    resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP DELETE request for the Workspaces 'delete' operation (no headers by design)."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_workspaces_list_by_resource_group_request(  # pylint: disable=name-too-long
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing Workspaces within a resource group."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_workspaces_list_by_subscription_request(  # pylint: disable=name-too-long
    subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing Workspaces across the whole subscription."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/workspaces"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_workspace_private_endpoint_connections_get_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    workspace_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request for a workspace private-endpoint connection."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
        "privateEndpointConnectionName": _SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_workspace_private_endpoint_connections_create_or_update_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    workspace_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PUT request creating/updating a workspace private-endpoint connection."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
        "privateEndpointConnectionName": _SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_workspace_private_endpoint_connections_delete_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    workspace_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP DELETE request for a workspace private-endpoint connection (no headers by design)."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
        "privateEndpointConnectionName": _SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_workspace_private_endpoint_connections_list_by_workspace_request(  # pylint: disable=name-too-long
    resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing private-endpoint connections under a workspace."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateEndpointConnections"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_chat_model_deployments_get_request(
    resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request for the ChatModelDeployments 'get' operation."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments/{chatModelDeploymentName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"),
        "chatModelDeploymentName": _SERIALIZER.url("chat_model_deployment_name", chat_model_deployment_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_chat_model_deployments_create_or_update_request(  # pylint: disable=name-too-long
    resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PUT request for the ChatModelDeployments 'createOrUpdate' operation."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments/{chatModelDeploymentName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "workspaceName": _SERIALIZER.url("workspace_name",
workspace_name, "str"), + "chatModelDeploymentName": _SERIALIZER.url("chat_model_deployment_name", chat_model_deployment_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_chat_model_deployments_update_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments/{chatModelDeploymentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "chatModelDeploymentName": _SERIALIZER.url("chat_model_deployment_name", chat_model_deployment_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # 
Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_chat_model_deployments_delete_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments/{chatModelDeploymentName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "chatModelDeploymentName": _SERIALIZER.url("chat_model_deployment_name", chat_model_deployment_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_chat_model_deployments_list_by_workspace_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", 
"application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/chatModelDeployments" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspace_private_link_resources_get_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, private_link_resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateLinkResources/{privateLinkResourceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "privateLinkResourceName": _SERIALIZER.url("private_link_resource_name", private_link_resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # 
type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_workspace_private_link_resources_list_by_workspace_request( # pylint: disable=name-too-long + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/workspaces/{workspaceName}/privateLinkResources" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_node_pools_get_request( + resource_group_name: str, supercomputer_name: str, node_pool_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools/{nodePoolName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
        "nodePoolName": _SERIALIZER.url("node_pool_name", node_pool_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_node_pools_create_or_update_request(  # pylint: disable=name-too-long
    resource_group_name: str, supercomputer_name: str, node_pool_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or replaces a node pool (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Content-Type is optional here; it is only emitted when the caller provides a body type.
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools/{nodePoolName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
        "nodePoolName": _SERIALIZER.url("node_pool_name", node_pool_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_node_pools_update_request(
    resource_group_name: str, supercomputer_name: str, node_pool_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PATCH request that updates a node pool (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools/{nodePoolName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
        "nodePoolName": _SERIALIZER.url("node_pool_name", node_pool_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] =
_SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)


def build_node_pools_delete_request(
    resource_group_name: str, supercomputer_name: str, node_pool_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for a node pool (generated helper; no Accept header needed)."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools/{nodePoolName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
        "nodePoolName": _SERIALIZER.url("node_pool_name", node_pool_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_node_pools_list_by_supercomputer_request(  # pylint: disable=name-too-long
    resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists node pools of a supercomputer (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}/nodePools"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_supercomputers_get_request(
    resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single supercomputer (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url,
params=_params, headers=_headers, **kwargs)


def build_supercomputers_create_or_update_request(  # pylint: disable=name-too-long
    resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or replaces a supercomputer (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Content-Type is optional; only emitted when the caller provides a body type.
    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_supercomputers_update_request(
    resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PATCH request that updates a supercomputer (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)


def build_supercomputers_delete_request(
    resource_group_name: str, supercomputer_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for a supercomputer (generated helper; no Accept header needed)."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers/{supercomputerName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "supercomputerName": _SERIALIZER.url("supercomputer_name", supercomputer_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return
HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_supercomputers_list_by_resource_group_request(  # pylint: disable=name-too-long
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists supercomputers in a resource group (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/supercomputers"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_supercomputers_list_by_subscription_request(  # pylint: disable=name-too-long
    subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists supercomputers across the subscription (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/supercomputers"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_assets_get_request(
    resource_group_name: str, storage_container_name: str, storage_asset_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single storage asset of a storage container (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets/{storageAssetName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
        "storageAssetName": _SERIALIZER.url("storage_asset_name", storage_asset_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_assets_create_or_update_request(  # pylint: disable=name-too-long
    resource_group_name: str, storage_container_name: str, storage_asset_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or replaces a storage asset (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params =
case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets/{storageAssetName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
        "storageAssetName": _SERIALIZER.url("storage_asset_name", storage_asset_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_assets_update_request(
    resource_group_name: str, storage_container_name: str, storage_asset_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PATCH request that updates a storage asset (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets/{storageAssetName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
        "storageAssetName": _SERIALIZER.url("storage_asset_name", storage_asset_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_assets_delete_request(
    resource_group_name: str, storage_container_name: str, storage_asset_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for a storage asset (generated helper; no Accept header needed)."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets/{storageAssetName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
        "storageAssetName": _SERIALIZER.url("storage_asset_name", storage_asset_name, "str"),
    }

    _url: str =
_url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_storage_assets_list_by_storage_container_request(  # pylint: disable=name-too-long
    resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists storage assets of a storage container (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}/storageAssets"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_containers_get_request(
    resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single storage container (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_containers_create_or_update_request(  # pylint: disable=name-too-long
    resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or replaces a storage container (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct
parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_containers_update_request(
    resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the PATCH request that updates a storage container (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_containers_delete_request(
    resource_group_name: str, storage_container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for a storage container (generated helper; no Accept header needed)."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers/{storageContainerName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "storageContainerName": _SERIALIZER.url("storage_container_name", storage_container_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_storage_containers_list_by_resource_group_request(  # pylint: disable=name-too-long
    resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists storage containers in a resource group (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Discovery/storageContainers"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] =
_SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_storage_containers_list_by_subscription_request(  # pylint: disable=name-too-long
    subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists storage containers across the subscription (generated helper)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-02-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/providers/Microsoft.Discovery/storageContainers"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


class Operations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.mgmt.discovery.DiscoveryClient`'s
    :attr:`operations` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Generated plumbing: the client hands in pipeline client, config and (de)serializers
        # either positionally or by keyword.
        input_args = list(args)
        self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(self, **kwargs: Any) -> ItemPaged["_models.Operation"]:
        """List the operations for the provider.

        :return: An iterator like instance of Operation
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Operation]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the generated builder; subsequent pages re-issue the
            # service-provided nextLink with the client's api-version re-applied.
            if not next_link:

                _request = build_operations_list_request(
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            # Pull the "value" page out of the response and expose "nextLink" for paging.
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.Operation],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponse,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)


class BookshelvesOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.mgmt.discovery.DiscoveryClient`'s
    :attr:`bookshelves` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Generated plumbing: the client hands in pipeline client, config and (de)serializers
        # either positionally or by keyword.
        input_args = list(args)
        self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def get(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> _models.Bookshelf:
        """Get a Bookshelf.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param bookshelf_name: The name of the Bookshelf. Required.
        :type bookshelf_name: str
        :return: Bookshelf.
The Bookshelf is compatible with MutableMapping
        :rtype: ~azure.mgmt.discovery.models.Bookshelf
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None)

        _request = build_bookshelves_get_request(
            resource_group_name=resource_group_name,
            bookshelf_name=bookshelf_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.Bookshelf, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    def _create_or_update_initial(
        self,
        resource_group_name: str,
        bookshelf_name: str,
        resource: Union[_models.Bookshelf, JSON, IO[bytes]],
        **kwargs: Any
    ) -> Iterator[bytes]:
        # Initial PUT of the create-or-update long-running operation; always streams the
        # raw response bytes so the poller can consume them.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # File-like / bytes bodies pass through untouched; model or dict bodies are JSON-encoded.
        if isinstance(resource, (IOBase, bytes)):
            _content = resource
        else:
            _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_bookshelves_create_or_update_request(
            resource_group_name=resource_group_name,
            bookshelf_name=bookshelf_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            try:
                response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 201:
            # LRO polling headers returned on the asynchronous (201) path.
            response_headers["Azure-AsyncOperation"] = self._deserialize(
                "str", response.headers.get("Azure-AsyncOperation")
            )
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @overload
    def begin_create_or_update(
        self,
        resource_group_name: str,
        bookshelf_name: str,
        resource: _models.Bookshelf,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.Bookshelf]:
        """Create a Bookshelf.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param bookshelf_name: The name of the Bookshelf. Required.
        :type bookshelf_name: str
        :param resource: Resource create parameters. Required.
        :type resource: ~azure.mgmt.discovery.models.Bookshelf
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with
         MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_create_or_update(
        self,
        resource_group_name: str,
        bookshelf_name: str,
        resource: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.Bookshelf]:
        """Create a Bookshelf.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
+ :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + resource: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Create a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param resource: Resource create parameters. Is one of the following types: Bookshelf, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Bookshelf or JSON or IO[bytes] + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Bookshelf, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + 
polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Bookshelf].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Bookshelf]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + properties: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelves_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: _models.Bookshelf, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Bookshelf + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. 
The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Bookshelf. 
The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + bookshelf_name: str, + properties: Union[_models.Bookshelf, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Bookshelf]: + """Update a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Bookshelf, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Bookshelf or JSON or IO[bytes] + :return: An instance of LROPoller that returns Bookshelf. The Bookshelf is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Bookshelf] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + 
response = pipeline_response.http_response + deserialized = _deserialize(_models.Bookshelf, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Bookshelf].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Bookshelf]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_bookshelves_delete_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, bookshelf_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a Bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Bookshelf"]: + """List Bookshelf resources by resource group. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of Bookshelf + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Bookshelf]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelves_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + 
list_of_elem = _deserialize( + List[_models.Bookshelf], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Bookshelf"]: + """List Bookshelf resources by subscription ID. 
+ + :return: An iterator like instance of Bookshelf + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Bookshelf] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Bookshelf]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelves_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Bookshelf], + deserialized.get("value", []), + ) + if cls: + list_of_elem = 
cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BookshelfPrivateEndpointConnectionsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`bookshelf_private_endpoint_connections` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.BookshelfPrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: BookshelfPrivateEndpointConnection. The BookshelfPrivateEndpointConnection is + compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BookshelfPrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.BookshelfPrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.BookshelfPrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_bookshelf_private_endpoint_connections_create_or_update_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: _models.BookshelfPrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. 
+ :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BookshelfPrivateEndpointConnection. 
The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + bookshelf_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.BookshelfPrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.BookshelfPrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Is one of the following types: + BookshelfPrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection or JSON or + IO[bytes] + :return: An instance of LROPoller that returns BookshelfPrivateEndpointConnection. The + BookshelfPrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BookshelfPrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = 
pipeline_response.http_response + deserialized = _deserialize(_models.BookshelfPrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.BookshelfPrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.BookshelfPrivateEndpointConnection]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", 
self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, bookshelf_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Deletes the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_bookshelf( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> 
ItemPaged["_models.BookshelfPrivateEndpointConnection"]: + """Lists all private endpoint connections for a bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: An iterator like instance of BookshelfPrivateEndpointConnection + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.BookshelfPrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BookshelfPrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelf_private_endpoint_connections_list_by_bookshelf_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, 
_parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BookshelfPrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BookshelfPrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`bookshelf_private_link_resources` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, bookshelf_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.BookshelfPrivateLinkResource: + """Gets the specified private link resource for the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :param private_link_resource_name: The name of the private link associated with the Azure + resource. Required. + :type private_link_resource_name: str + :return: BookshelfPrivateLinkResource. 
The BookshelfPrivateLinkResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.discovery.models.BookshelfPrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BookshelfPrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_bookshelf_private_link_resources_get_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = 
_deserialize(_models.BookshelfPrivateLinkResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_by_bookshelf( + self, resource_group_name: str, bookshelf_name: str, **kwargs: Any + ) -> ItemPaged["_models.BookshelfPrivateLinkResource"]: + """Lists all private link resources for the bookshelf. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param bookshelf_name: The name of the Bookshelf. Required. + :type bookshelf_name: str + :return: An iterator like instance of BookshelfPrivateLinkResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.BookshelfPrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BookshelfPrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_bookshelf_private_link_resources_list_by_bookshelf_request( + resource_group_name=resource_group_name, + bookshelf_name=bookshelf_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = 
case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.BookshelfPrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class ToolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`tools` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> _models.Tool: + """Get a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :return: Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Tool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + + _request = build_tools_get_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, 
stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Tool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, resource_group_name: str, tool_name: str, resource: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_tools_create_or_update_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + 
path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: _models.Tool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. 
+ :type resource: ~azure.mgmt.discovery.models.Tool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + tool_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, tool_name: str, resource: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Create a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param resource: Resource create parameters. Is one of the following types: Tool, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Tool or JSON or IO[bytes] + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def 
get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Tool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Tool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Tool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, resource_group_name: str, tool_name: str, properties: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_tools_update_request( + resource_group_name=resource_group_name, + 
tool_name=tool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: _models.Tool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. 
+ :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Tool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + tool_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Required. 
+ :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Tool. The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, resource_group_name: str, tool_name: str, properties: Union[_models.Tool, JSON, IO[bytes]], **kwargs: Any + ) -> LROPoller[_models.Tool]: + """Update a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :param properties: The resource properties to be updated. Is one of the following types: Tool, + JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Tool or JSON or IO[bytes] + :return: An instance of LROPoller that returns Tool. 
The Tool is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Tool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Tool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Tool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Tool]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore 
+ ) + + def _delete_initial(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_tools_delete_request( + resource_group_name=resource_group_name, + tool_name=tool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, 
deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, tool_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a Tool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param tool_name: The name of the Tool. Required. + :type tool_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + tool_name=tool_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Tool"]: + """List Tool resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of Tool + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Tool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_tools_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Tool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Tool"]: + """List Tool resources by subscription ID. 
+ + :return: An iterator like instance of Tool + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Tool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Tool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_tools_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Tool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: 
ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class ProjectsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`projects` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any) -> _models.Project: + """Get a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :return: Project. 
The Project is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Project + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + + _request = build_projects_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Project, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # 
type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_projects_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: _models.Project, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Project + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + resource: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Create a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param resource: Resource create parameters. Is one of the following types: Project, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Project or JSON or IO[bytes] + :return: An instance of LROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Project, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Project].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Project]( + self._client, 
raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_projects_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: _models.Project, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Project + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Project. The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + project_name: str, + properties: Union[_models.Project, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Project]: + """Update a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Project, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Project or JSON or IO[bytes] + :return: An instance of LROPoller that returns Project. 
The Project is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Project] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Project, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Project].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Project]( + self._client, 
raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_projects_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, project_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a Project. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param project_name: The name of the Project. Required. + :type project_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + project_name=project_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, 
path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.Project"]: + """List Project resources by Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An iterator like instance of Project + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Project] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Project]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_projects_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Project], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`workspaces` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: + """Get a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: Workspace. The Workspace is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Workspace + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + + _request = build_workspaces_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Workspace, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + resource: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspaces_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + 
api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + resource: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Create a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param resource: Resource create parameters. Is one of the following types: Workspace, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Workspace or JSON or IO[bytes] + :return: An instance of LROPoller that returns Workspace. 
The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Workspace, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Workspace].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Workspace]( + self._client, raw_result, 
get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + properties: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspaces_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + properties: Union[_models.Workspace, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Update a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Workspace, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Workspace or JSON or IO[bytes] + :return: An instance of LROPoller that returns Workspace. The Workspace is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Workspace, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is 
False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Workspace].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Workspace]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_workspaces_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", 
self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Workspace"]: + """List Workspace resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of Workspace + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Workspace]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspaces_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Workspace], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Workspace"]: + """List Workspace resources by subscription ID. 
+ + :return: An iterator like instance of Workspace + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Workspace]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspaces_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Workspace], + deserialized.get("value", []), + ) + if cls: + list_of_elem = 
cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class WorkspacePrivateEndpointConnectionsOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`workspace_private_endpoint_connections` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.WorkspacePrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: WorkspacePrivateEndpointConnection. The WorkspacePrivateEndpointConnection is + compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.WorkspacePrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_workspace_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.WorkspacePrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.WorkspacePrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_workspace_private_endpoint_connections_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: _models.WorkspacePrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. 
+ :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns WorkspacePrivateEndpointConnection. 
The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.WorkspacePrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.WorkspacePrivateEndpointConnection]: + """Approves or updates the specified private endpoint connection. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: Resource create parameters. Is one of the following types: + WorkspacePrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection or JSON or + IO[bytes] + :return: An instance of LROPoller that returns WorkspacePrivateEndpointConnection. The + WorkspacePrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.WorkspacePrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = 
pipeline_response.http_response + deserialized = _deserialize(_models.WorkspacePrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.WorkspacePrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.WorkspacePrivateEndpointConnection]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_workspace_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", 
self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Deletes the specified private endpoint connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> 
ItemPaged["_models.WorkspacePrivateEndpointConnection"]: + """Lists all private endpoint connections for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An iterator like instance of WorkspacePrivateEndpointConnection + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.WorkspacePrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.WorkspacePrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspace_private_endpoint_connections_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, 
_parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.WorkspacePrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class ChatModelDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`chat_model_deployments` attribute. 
    """

    # NOTE(review): this class follows the Azure SDK code-generator pattern; presumably
    # regenerated from TypeSpec — confirm before hand-editing, changes would be lost.

    def __init__(self, *args, **kwargs) -> None:
        # The client wires these in either positionally or by keyword; both paths accepted.
        input_args = list(args)
        self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def get(
        self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any
    ) -> _models.ChatModelDeployment:
        """Get a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :return: ChatModelDeployment. The ChatModelDeployment is compatible with MutableMapping
        :rtype: ~azure.mgmt.discovery.models.ChatModelDeployment
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None)

        _request = build_chat_model_deployments_get_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            chat_model_deployment_name=chat_model_deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if _stream:
            # Caller asked for streaming: hand back the raw byte iterator, decompressed or not.
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.ChatModelDeployment, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    def _create_or_update_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        resource: Union[_models.ChatModelDeployment, JSON, IO[bytes]],
        **kwargs: Any
    ) -> Iterator[bytes]:
        # Initial LRO call: sends the PUT and returns the raw (streamed) response body;
        # polling to completion happens in begin_create_or_update.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(resource, (IOBase, bytes)):
            # Binary payloads pass through untouched; models/JSON are serialized.
            _content = resource
        else:
            _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_chat_model_deployments_create_or_update_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            chat_model_deployment_name=chat_model_deployment_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True  # initial LRO responses are always streamed
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            try:
                response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 201:
            # 201 carries the async-operation polling URL and retry hint.
            response_headers["Azure-AsyncOperation"] = self._deserialize(
                "str", response.headers.get("Azure-AsyncOperation")
            )
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @overload
    def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        resource: _models.ChatModelDeployment,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.ChatModelDeployment]:
        """Create a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :param resource: Resource create parameters. Required.
        :type resource: ~azure.mgmt.discovery.models.ChatModelDeployment
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        resource: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.ChatModelDeployment]:
        """Create a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :param resource: Resource create parameters. Required.
        :type resource: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        resource: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.ChatModelDeployment]:
        """Create a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :param resource: Resource create parameters. Required.
        :type resource: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        resource: Union[_models.ChatModelDeployment, JSON, IO[bytes]],
        **kwargs: Any
    ) -> LROPoller[_models.ChatModelDeployment]:
        """Create a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :param resource: Resource create parameters. Is one of the following types:
         ChatModelDeployment, JSON, IO[bytes] Required.
        :type resource: ~azure.mgmt.discovery.models.ChatModelDeployment or JSON or IO[bytes]
        :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None)
        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                chat_model_deployment_name=chat_model_deployment_name,
                resource=resource,
                content_type=content_type,
                cls=lambda x, y, z: x,  # hand the raw PipelineResponse back for the poller
                headers=_headers,
                params=_params,
                **kwargs
            )
            raw_result.http_response.read()  # type: ignore  # drain body before polling starts
        kwargs.pop("error_map", None)  # already consumed by the initial call

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response into the model once polling completes.
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.ChatModelDeployment, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: PollingMethod = cast(
                PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Resume an in-flight operation instead of issuing a new initial request.
            return LROPoller[_models.ChatModelDeployment].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller[_models.ChatModelDeployment](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )

    def _update_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        properties: Union[_models.ChatModelDeployment, JSON, IO[bytes]],
        **kwargs: Any
    ) -> Iterator[bytes]:
        # Initial LRO call for PATCH; mirrors _create_or_update_initial but accepts
        # 200/202 and surfaces the Location header on 202.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        if isinstance(properties, (IOBase, bytes)):
            _content = properties
        else:
            _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_chat_model_deployments_update_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            chat_model_deployment_name=chat_model_deployment_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True  # initial LRO responses are always streamed
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            try:
                response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # 202 carries the polling Location and retry hint.
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @overload
    def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        properties: _models.ChatModelDeployment,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.ChatModelDeployment]:
        """Update a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: ~azure.mgmt.discovery.models.ChatModelDeployment
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        properties: JSON,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.ChatModelDeployment]:
        """Update a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: JSON
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        properties: IO[bytes],
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.ChatModelDeployment]:
        """Update a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :param properties: The resource properties to be updated. Required.
        :type properties: IO[bytes]
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def begin_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        chat_model_deployment_name: str,
        properties: Union[_models.ChatModelDeployment, JSON, IO[bytes]],
        **kwargs: Any
    ) -> LROPoller[_models.ChatModelDeployment]:
        """Update a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :param properties: The resource properties to be updated. Is one of the following types:
         ChatModelDeployment, JSON, IO[bytes] Required.
        :type properties: ~azure.mgmt.discovery.models.ChatModelDeployment or JSON or IO[bytes]
        :return: An instance of LROPoller that returns ChatModelDeployment. The ChatModelDeployment is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.ChatModelDeployment] = kwargs.pop("cls", None)
        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            raw_result = self._update_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                chat_model_deployment_name=chat_model_deployment_name,
                properties=properties,
                content_type=content_type,
                cls=lambda x, y, z: x,  # hand the raw PipelineResponse back for the poller
                headers=_headers,
                params=_params,
                **kwargs
            )
            raw_result.http_response.read()  # type: ignore  # drain body before polling starts
        kwargs.pop("error_map", None)  # already consumed by the initial call

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response into the model once polling completes.
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.ChatModelDeployment, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: PollingMethod = cast(
                PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Resume an in-flight operation instead of issuing a new initial request.
            return LROPoller[_models.ChatModelDeployment].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller[_models.ChatModelDeployment](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )

    def _delete_initial(
        self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any
    ) -> Iterator[bytes]:
        # Initial LRO call for DELETE; accepts 202 (async, Location header) or 204 (done).
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

        _request = build_chat_model_deployments_delete_request(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            chat_model_deployment_name=chat_model_deployment_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = True  # initial LRO responses are always streamed
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202, 204]:
            try:
                response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @distributed_trace
    def begin_delete(
        self, resource_group_name: str, workspace_name: str, chat_model_deployment_name: str, **kwargs: Any
    ) -> LROPoller[None]:
        """Delete a ChatModelDeployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :param chat_model_deployment_name: The name of the ChatModelDeployment. Required.
        :type chat_model_deployment_name: str
        :return: An instance of LROPoller that returns None
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                chat_model_deployment_name=chat_model_deployment_name,
                cls=lambda x, y, z: x,  # hand the raw PipelineResponse back for the poller
                headers=_headers,
                params=_params,
                **kwargs
            )
            raw_result.http_response.read()  # type: ignore  # drain body before polling starts
        kwargs.pop("error_map", None)  # already consumed by the initial call

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            # Delete has no body to deserialize; only invoke the custom callback if given.
            if cls:
                return cls(pipeline_response, None, {})  # type: ignore

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: PollingMethod = cast(
                PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

    @distributed_trace
    def list_by_workspace(
        self, resource_group_name: str, workspace_name: str, **kwargs: Any
    ) -> ItemPaged["_models.ChatModelDeployment"]:
        """List ChatModelDeployment resources by Workspace.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param workspace_name: The name of the Workspace. Required.
        :type workspace_name: str
        :return: An iterator like instance of ChatModelDeployment
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.ChatModelDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_models.ChatModelDeployment]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the builder; subsequent pages reuse nextLink with the
            # client's api-version re-applied.
            if not next_link:

                _request = build_chat_model_deployments_list_by_workspace_request(
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    subscription_id=self._config.subscription_id,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.ChatModelDeployment], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class WorkspacePrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`workspace_private_link_resources` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.WorkspacePrivateLinkResource: + """Gets the specified private link resource for the workspace. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :param private_link_resource_name: The name of the private link associated with the Azure + resource. Required. + :type private_link_resource_name: str + :return: WorkspacePrivateLinkResource. The WorkspacePrivateLinkResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.discovery.models.WorkspacePrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.WorkspacePrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_workspace_private_link_resources_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.WorkspacePrivateLinkResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.WorkspacePrivateLinkResource"]: + """Lists all private link resources for the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the Workspace. Required. + :type workspace_name: str + :return: An iterator like instance of WorkspacePrivateLinkResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.WorkspacePrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.WorkspacePrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_workspace_private_link_resources_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments 
= { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.WorkspacePrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class NodePoolsOperations: + """ + .. 
warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`node_pools` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> _models.NodePool: + """Get a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :return: NodePool. 
The NodePool is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.NodePool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + + _request = build_node_pools_get_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.NodePool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_node_pools_create_or_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: _models.NodePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.NodePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Required. 
+ :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + resource: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Create a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param resource: Resource create parameters. Is one of the following types: NodePool, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.NodePool or JSON or IO[bytes] + :return: An instance of LROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.NodePool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.NodePool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.NodePool]( 
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_node_pools_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: _models.NodePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.NodePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Required. 
+ :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns NodePool. The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + node_pool_name: str, + properties: Union[_models.NodePool, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.NodePool]: + """Update a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :param properties: The resource properties to be updated. Is one of the following types: + NodePool, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.NodePool or JSON or IO[bytes] + :return: An instance of LROPoller that returns NodePool. 
The NodePool is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.NodePool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.NodePool, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.NodePool].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.NodePool]( + 
self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_node_pools_delete_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = 
self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, supercomputer_name: str, node_pool_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a NodePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param node_pool_name: The name of the NodePool. Required. + :type node_pool_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + node_pool_name=node_pool_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod 
= cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_supercomputer( + self, resource_group_name: str, supercomputer_name: str, **kwargs: Any + ) -> ItemPaged["_models.NodePool"]: + """List NodePool resources by Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :return: An iterator like instance of NodePool + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.NodePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.NodePool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_node_pools_list_by_supercomputer_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, 
"str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.NodePool], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class SupercomputersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`supercomputers` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, supercomputer_name: str, **kwargs: Any) -> _models.Supercomputer: + """Get a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :return: Supercomputer. 
The Supercomputer is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.Supercomputer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + + _request = build_supercomputers_get_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Supercomputer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + resource: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_supercomputers_create_or_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: _models.Supercomputer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.Supercomputer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Create a Supercomputer. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. 
The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + supercomputer_name: str, + resource: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Create a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param resource: Resource create parameters. Is one of the following types: Supercomputer, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.Supercomputer or JSON or IO[bytes] + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + 
kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Supercomputer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Supercomputer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Supercomputer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + supercomputer_name: str, + properties: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = 
build_supercomputers_update_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: _models.Supercomputer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.Supercomputer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Update a Supercomputer. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns Supercomputer. The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + supercomputer_name: str, + properties: Union[_models.Supercomputer, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.Supercomputer]: + """Update a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :param properties: The resource properties to be updated. Is one of the following types: + Supercomputer, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.Supercomputer or JSON or IO[bytes] + :return: An instance of LROPoller that returns Supercomputer. 
The Supercomputer is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Supercomputer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.Supercomputer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.Supercomputer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.Supercomputer]( + 
self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, supercomputer_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_supercomputers_delete_request( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized 
= response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, supercomputer_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a Supercomputer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param supercomputer_name: The name of the Supercomputer. Required. + :type supercomputer_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + supercomputer_name=supercomputer_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if 
cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Supercomputer"]: + """List Supercomputer resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of Supercomputer + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Supercomputer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_supercomputers_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Supercomputer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.Supercomputer"]: + """List Supercomputer resources by subscription ID. 
+ + :return: An iterator like instance of Supercomputer + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.Supercomputer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Supercomputer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_supercomputers_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Supercomputer], + deserialized.get("value", []), + ) + if cls: 
+ list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class StorageAssetsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`storage_assets` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> _models.StorageAsset: + """Get a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. 
+ :type storage_asset_name: str + :return: StorageAsset. The StorageAsset is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.StorageAsset + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + + _request = build_storage_assets_get_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.StorageAsset, 
response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_assets_create_or_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + 
response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: _models.StorageAsset, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.StorageAsset + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. 
The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. 
+ :type storage_asset_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + resource: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Create a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param resource: Resource create parameters. Is one of the following types: StorageAsset, JSON, + IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.StorageAsset or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageAsset. 
The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAsset, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageAsset].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + 
return LROPoller[_models.StorageAsset]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_assets_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the 
body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: _models.StorageAsset, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.StorageAsset + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. 
The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. 
+ :type storage_asset_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageAsset. The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + storage_asset_name: str, + properties: Union[_models.StorageAsset, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageAsset]: + """Update a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :param properties: The resource properties to be updated. Is one of the following types: + StorageAsset, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.StorageAsset or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageAsset. 
The StorageAsset is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageAsset] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageAsset, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageAsset].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + 
return LROPoller[_models.StorageAsset]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_assets_delete_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", 
response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, storage_container_name: str, storage_asset_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a StorageAsset. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param storage_asset_name: The name of the StorageAsset. Required. + :type storage_asset_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + storage_asset_name=storage_asset_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_storage_container( + self, resource_group_name: str, storage_container_name: str, **kwargs: Any + ) -> ItemPaged["_models.StorageAsset"]: + """List StorageAsset resources by StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. 
+ :type storage_container_name: str + :return: An iterator like instance of StorageAsset + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.StorageAsset] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageAsset]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_assets_list_by_storage_container_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageAsset], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class StorageContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :attr:`storage_containers` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, storage_container_name: str, **kwargs: Any) -> _models.StorageContainer: + """Get a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :return: StorageContainer. The StorageContainer is compatible with MutableMapping + :rtype: ~azure.mgmt.discovery.models.StorageContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + + _request = build_storage_containers_get_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else 
response.iter_raw() + else: + deserialized = _deserialize(_models.StorageContainer, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + storage_container_name: str, + resource: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_containers_create_or_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 
201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: _models.StorageContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.discovery.models.StorageContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + storage_container_name: str, + resource: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Create a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param resource: Resource create parameters. Is one of the following types: StorageContainer, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.discovery.models.StorageContainer or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageContainer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageContainer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
LROPoller[_models.StorageContainer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + storage_container_name: str, + properties: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_storage_containers_update_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: _models.StorageContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.discovery.models.StorageContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of LROPoller that returns StorageContainer. The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + storage_container_name: str, + properties: Union[_models.StorageContainer, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.StorageContainer]: + """Update a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :param properties: The resource properties to be updated. Is one of the following types: + StorageContainer, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.discovery.models.StorageContainer or JSON or IO[bytes] + :return: An instance of LROPoller that returns StorageContainer. 
The StorageContainer is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StorageContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.StorageContainer, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.StorageContainer].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
LROPoller[_models.StorageContainer]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, storage_container_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_storage_containers_delete_request( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, storage_container_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a StorageContainer. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_container_name: The name of the StorageContainer. Required. + :type storage_container_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + storage_container_name=storage_container_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + 
polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.StorageContainer"]: + """List StorageContainer resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :return: An iterator like instance of StorageContainer + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageContainer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_containers_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = 
case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageContainer], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.StorageContainer"]: + """List StorageContainer resources by subscription ID. 
+ + :return: An iterator like instance of StorageContainer + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.discovery.models.StorageContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.StorageContainer]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_storage_containers_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.StorageContainer], + deserialized.get("value", 
[]), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_patch.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/py.typed b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/dev_requirements.txt b/sdk/discovery/azure-mgmt-discovery/dev_requirements.txt new file mode 100644 index 000000000000..ece056fe0984 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/dev_requirements.txt @@ -0,0 +1,5 @@ +-e ../../../eng/tools/azure-sdk-tools +../../core/azure-core +../../identity/azure-identity +../../core/azure-mgmt-core +aiohttp \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..e2f9e9ac94a1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_endpoint_connections.begin_create_or_update( + resource_group_name="rgdiscovery", + bookshelf_name="a65f3c23bf2baa5bd4", + private_endpoint_connection_name="connection", + resource={ + "properties": { + "privateEndpoint": {}, + "privateLinkServiceConnectionState": { + "actionsRequired": "vgqhrxvmviabfgmafqtbej", + "description": "lknyprq", + "status": "Pending", + }, + } + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateEndpointConnections_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py new file mode 100644 index 000000000000..c9a719dc6d4c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_endpoint_connections_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.bookshelf_private_endpoint_connections.begin_delete( + resource_group_name="rgdiscovery", + bookshelf_name="9988c91bf62635cea5", + private_endpoint_connection_name="connection", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateEndpointConnections_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py new file mode 100644 index 000000000000..d224abee6d1f --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py @@ 
-0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_endpoint_connections_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_endpoint_connections.get( + resource_group_name="rgdiscovery", + bookshelf_name="ca2ea71fd0a5838c7f", + private_endpoint_connection_name="connection", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateEndpointConnections_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py new file mode 100644 index 000000000000..9b462251578b --- /dev/null +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_endpoint_connections.list_by_bookshelf( + resource_group_name="rgdiscovery", + bookshelf_name="d96263ffc8d8c904d4", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateEndpointConnections_ListByBookshelf_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py new file mode 100644 index 000000000000..0357315dab09 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_link_resources_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_link_resources.get( + resource_group_name="rgdiscovery", + bookshelf_name="9158657d63f4f9235f", + private_link_resource_name="connection", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateLinkResources_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py new file mode 100644 index 000000000000..1c85ad91e02f --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelf_private_link_resources.list_by_bookshelf( + resource_group_name="rgdiscovery", + bookshelf_name="cb4a7b7d5c4b6c3f78", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/BookshelfPrivateLinkResources_ListByBookshelf_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..920d43f6b064 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py @@ -0,0 +1,61 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.begin_create_or_update( + resource_group_name="rgdiscovery", + bookshelf_name="21b8f5a6a47fa1fdcc", + resource={ + "location": "uksouth", + "properties": { + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "identityClientId": "00000011-1111-2222-2222-123456789111", + "keyName": "tjjzitmclgtahulm", + "keyVaultUri": "https://microsoft.com/a", + "keyVersion": "dnoogjozeqlpubvkxwrujbncstsm", + }, + "logAnalyticsClusterId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.OperationalInsights/clusters/cluster1", + "privateEndpointSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/privateEndpointSubnet1", + "publicNetworkAccess": "Enabled", + "searchSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/searchSubnet1", + "workloadIdentities": {"key8334": {}}, + }, + "tags": {"key782": "hmvugqbu"}, + 
}, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py new file mode 100644 index 000000000000..f87c11d33dc6 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.bookshelves.begin_delete( + resource_group_name="rgdiscovery", + bookshelf_name="cdaa070c4d0ea7b9c9", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py new file mode 100644 index 000000000000..70ef4e234ec1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.get( + resource_group_name="rgdiscovery", + bookshelf_name="85c2fc6e437c0b608b", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..149f8eae5cae --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..13842b343a4f --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py new file mode 100644 index 000000000000..dc4dbe1d1600 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python bookshelves_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.bookshelves.begin_update( + resource_group_name="rgdiscovery", + bookshelf_name="c6189a7b33260c4a72", + properties={ + "properties": { + "keyVaultProperties": {"keyName": "b", "keyVersion": "kyf"}, + "publicNetworkAccess": "Enabled", + }, + "tags": {"key1792": "dnybouectwzjb"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Bookshelves_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..33e660ba1b87 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.chat_model_deployments.begin_create_or_update( + resource_group_name="rgdiscovery", + workspace_name="eb2204766409e111d9", + chat_model_deployment_name="d1844ae17cc93bd299", + resource={ + "location": "uksouth", + "properties": {"modelFormat": "tcttsgevrsuflt", "modelName": "nvwdoluhukiachlyrdnpxusxsc"}, + "tags": {"key4822": "fpesmhjievwzxmhxszcgpztivcgw"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py new file mode 100644 index 000000000000..abaed9cae0f6 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.chat_model_deployments.begin_delete( + resource_group_name="rgdiscovery", + workspace_name="015403a79d07536250", + chat_model_deployment_name="7a1ee53e20d918a13d", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py new file mode 100644 index 000000000000..1a8de95ba342 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.chat_model_deployments.get( + resource_group_name="rgdiscovery", + workspace_name="715bd6631a63225578", + chat_model_deployment_name="7938c93c6f61d31f7e", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py new file mode 100644 index 000000000000..61061a168fe7 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_list_by_workspace_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.chat_model_deployments.list_by_workspace( + resource_group_name="rgdiscovery", + workspace_name="715794cf970dc53142", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_ListByWorkspace_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py new file mode 100644 index 000000000000..b20d727b632d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python chat_model_deployments_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.chat_model_deployments.begin_update( + resource_group_name="rgdiscovery", + workspace_name="438970fd7f0137032c", + chat_model_deployment_name="fd0837f1d866060b11", + properties={"properties": {}, "tags": {"key6223": "tvufnjfnrdadechkcyoboyrcme"}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/ChatModelDeployments_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..77e678efa54d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python node_pools_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.node_pools.begin_create_or_update( + resource_group_name="rgdiscovery", + supercomputer_name="8074da5c77f95509a8", + node_pool_name="5a88c24ec4e7091650", + resource={ + "location": "uksouth", + "properties": { + "maxNodeCount": 4, + "minNodeCount": 0, + "scaleSetPriority": "Regular", + "subnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/subnet1", + "vmSize": "Standard_NC24ads_A100_v4", + }, + "tags": {"key6074": "qlnvwgazrqmwauqqvxntjtoye"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/NodePools_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py new file 
# Generated sample: NodePools_Delete (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Delete a node pool and wait for the operation to finish."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    poller = client.node_pools.begin_delete(
        resource_group_name="rgdiscovery",
        supercomputer_name="6ddaf20b09c36fc7ef",
        node_pool_name="6dcea29fcbc2279a3b",
    )
    poller.result()  # delete returns no body; this just waits for completion


# x-ms-original-file: 2026-02-01-preview/NodePools_Delete_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: NodePools_Get (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Fetch a single node pool and print it."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    node_pool = client.node_pools.get(
        resource_group_name="rgdiscovery",
        supercomputer_name="3d4fce3989a31db9c7",
        node_pool_name="80084da43e5c8bc50e",
    )
    print(node_pool)


# x-ms-original-file: 2026-02-01-preview/NodePools_Get_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: NodePools_ListBySupercomputer (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """List every node pool under a supercomputer, printing each one."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    # The pager transparently fetches additional pages as it is iterated.
    for node_pool in client.node_pools.list_by_supercomputer(
        resource_group_name="rgdiscovery",
        supercomputer_name="7cc28f3db7c8fa0087",
    ):
        print(node_pool)


# x-ms-original-file: 2026-02-01-preview/NodePools_ListBySupercomputer_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: NodePools_Update (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Patch a node pool's scaling bounds and tags, then print the result."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    patch = {"properties": {"maxNodeCount": 21, "minNodeCount": 0}, "tags": {"key5366": "uyhhzfhedjkqanudogu"}}
    poller = client.node_pools.begin_update(
        resource_group_name="rgdiscovery",
        supercomputer_name="f674a0697b0c54044e",
        node_pool_name="12ceb04d31658f1ec7",
        properties=patch,
    )
    print(poller.result())  # wait for the long-running update to complete


# x-ms-original-file: 2026-02-01-preview/NodePools_Update_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: Operations_List, maximum set (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Enumerate the operations exposed by the Microsoft.Discovery provider."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    for operation in client.operations.list():
        print(operation)


# x-ms-original-file: 2026-02-01-preview/Operations_List_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: Operations_List, minimum set (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Enumerate the operations exposed by the Microsoft.Discovery provider."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    for operation in client.operations.list():
        print(operation)


# x-ms-original-file: 2026-02-01-preview/Operations_List_MinimumSet_Gen.json
if __name__ == "__main__":
    main()
# pylint: disable=line-too-long,useless-suppression
# Generated sample: Projects_CreateOrUpdate (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient

# Opaque generated behaviorPreferences payload, reproduced verbatim from the
# recorded test data (Projects_CreateOrUpdate_MaximumSet_Gen.json).
_BEHAVIOR_PREFERENCES = (
    "zjhzrtkwdmwbueseguplzpxeqvbqrknwvxxgtwtpldnguihcbprdwsihucbrmhyvmxuvldlgtrheqehrpcmdqtjknlyjnzdvphjicifbuvlsjgoaiaeunshmuupogxhwywntzhdvrdkknumgyofeltjqyenfiemerqsafaphhzgkwqrnuhbxklclishnnailctvcdjzvfroakitqkmllziocaolmyvytjmqhivljovriyicparifitswaynjsczcpfsgwyjsojiwqzauscpgmxqhznkofwydrjauiwkwkjrvclbufqmzyftfwjkymalhwkpiabljammjstpsknxqouabruobyznqnscucrvurarbbtefmaqiqfkyykuifojikmnkfgnjyagxwpjilfjyfpkqjdgrupitpqbvebmhsizeomzxqekqbsqqnlkefolbgbnfavtliixrvqxcbcxrxnpozucsvseddpjzsydiebyxxdehaniinfvfbflqwmyqelsjquigikebmfuuhdervonditfsummrsuokoqtessdmwptawejkqkkmtzgomamsbcbpviejbirvdwbcoenrsxeyayglvygsknetjuxdmbroritqklncrrnstwuaoohrqnypxfgbvfofsabnrgofobhdkktjyuhrxzmkrwglqczjlrfxbcbrplqmocjbphkjpdfxrfpkfrvlqqmiwftsuhxmjpimvngakbpkcvdevwubfqjjpnjmoiruabwxtzqehlwangolxjeqzjbfxltrgchuiginrgaeaztcqwacogzhvuhxcolvlzmoulikspebsdjyqlzgrtkqobkszfspnjftmnzsbyctxhkjsyemlehnqqhvvfdtrfarpjgaklmvbwukaykstipsnejnpnwaxskppilmgcdebupwpyyqajergevjizlhkiinvvqojrnegwbilhaktgjfkkefnwfbcxmjgylidekqjvgkxvnxdrttxzoyndupwqvlebbfgyiddgixrhcdbfkgakpausgcmjsgpqsjonrrrodgzzkworpvxzjzgtdbzqerqwjhjwoescduqwdwxsgmgyxolhlpdhuvscmzuuiynntspcyjlyngexpardjklyycnyquvbtfwlnqzxujowoljfvpnbyyoqiblxiiojiaeqpoblnrgttqmjqvqjaawevmbxgmbsaumjutbxhywkvhrljzcpitnunwfvtyiglqwlcngffnzdnfvccfqgdiazxoxgsepfqlyoxnofvogtsudsxarqpxhpidrwbykeypszgcbbyshljhlxperpdysagbmumexubjzimbetfayqfcbdkdpfsqrfuisioggdewxmeuihqitkwqfvmmwfppxfoigaoskgzedlhtmjagcbvwwnfkeyocceioccfjmxvtjutydhwapxkgdgjxuquujkdjtbkwhrlbxgclyqdcdnexitthelxajidteqtahvjwoovaripeuzycrdazmmzvpcecahnyentvdqdfteddmddcllromsumkucqhcgdelqsotimumwulnplxqeckhwgngafyvjlykdyotciabfkkdbqlfgwxkjreelzdswbaqzhiweqopubmxlestbvcrdimtiyleisfsvheanokuaipbniseceonfqlbrfofrhbuwtuirnrooflajdyrzwgcmpztgeyzrgvohijxrwrgsmfxagjuuygqtbyilneezxkmcbjoyhljlnesiuhgznvxzglpemwqfpixqlppehxeqzwhuxfcknkmpfttpywxaimzrsfalarhtoiwenlulyoadfdcyvsahzngcashxrchwrxknrvzhldforskqsktvltfuuxhummulwcfvezmfedfobzyfryrrzbypsvakkgppvhhicjydgzmnhcxsfqrdwgbmitmzzmjyzgvzusjigujfohqdlgmrtxmvvxoaxyfiyrgvfawk"
    "yynjykpmejzqmrpzitetxhqcjrqspglkzricplkrecaiumajvsobcddohclhmyinyteblkxqoqnoiyfwspencqszjbsnpyokusxgptshytgzqkynhncmqgrxaeugwyzxxqubfdkozfqrdeveopnhyvjcpjziogeyinlvcbhwltteivcnkfehvatnbvkfoqedljupaxholukhagwcasdgagdhpmkiumdclzstyexknhlojeoqkuejmnbuhdwgskgbqfomxvkgziun"
)


def main():
    """Create (or update) a project in a workspace and print the result."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    project = {
        "location": "uksouth",
        "properties": {
            "settings": {"behaviorPreferences": _BEHAVIOR_PREFERENCES},
            "storageContainerIds": [
                "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/resourceGroups/rgdiscovery/providers/Microsoft.Discovery/storageContainers/storageContainer12"
            ],
        },
        "tags": {"key717": "tdn"},
    }

    poller = client.projects.begin_create_or_update(
        resource_group_name="rgdiscovery",
        workspace_name="1cf6d2a754acbb9655",
        project_name="55cdd016d5653ebf82",
        resource=project,
    )
    print(poller.result())  # wait for the long-running create to complete


# x-ms-original-file: 2026-02-01-preview/Projects_CreateOrUpdate_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: Projects_Delete (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Delete a project and wait for the operation to finish."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    poller = client.projects.begin_delete(
        resource_group_name="rgdiscovery",
        workspace_name="5020af62f469b308c0",
        project_name="9ae1e783de71d4e949",
    )
    poller.result()  # delete returns no body; this just waits for completion


# x-ms-original-file: 2026-02-01-preview/Projects_Delete_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: Projects_Get (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Fetch a single project and print it."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    project = client.projects.get(
        resource_group_name="rgdiscovery",
        workspace_name="aa8419d5add7095abd",
        project_name="4884cf65356e9c3489",
    )
    print(project)


# x-ms-original-file: 2026-02-01-preview/Projects_Get_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: Projects_ListByWorkspace (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """List every project in a workspace, printing each one."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    # The pager transparently fetches additional pages as it is iterated.
    for project in client.projects.list_by_workspace(
        resource_group_name="rgdiscovery",
        workspace_name="56c2d23d65c9121656",
    ):
        print(project)


# x-ms-original-file: 2026-02-01-preview/Projects_ListByWorkspace_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# pylint: disable=line-too-long,useless-suppression
# Generated sample: Projects_Update (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient

# Opaque generated behaviorPreferences payload, reproduced verbatim from the
# recorded test data (Projects_Update_MaximumSet_Gen.json).
_BEHAVIOR_PREFERENCES = (
    "zjhzrtkwdmwbueseguplzpxeqvbqrknwvxxgtwtpldnguihcbprdwsihucbrmhyvmxuvldlgtrheqehrpcmdqtjknlyjnzdvphjicifbuvlsjgoaiaeunshmuupogxhwywntzhdvrdkknumgyofeltjqyenfiemerqsafaphhzgkwqrnuhbxklclishnnailctvcdjzvfroakitqkmllziocaolmyvytjmqhivljovriyicparifitswaynjsczcpfsgwyjsojiwqzauscpgmxqhznkofwydrjauiwkwkjrvclbufqmzyftfwjkymalhwkpiabljammjstpsknxqouabruobyznqnscucrvurarbbtefmaqiqfkyykuifojikmnkfgnjyagxwpjilfjyfpkqjdgrupitpqbvebmhsizeomzxqekqbsqqnlkefolbgbnfavtliixrvqxcbcxrxnpozucsvseddpjzsydiebyxxdehaniinfvfbflqwmyqelsjquigikebmfuuhdervonditfsummrsuokoqtessdmwptawejkqkkmtzgomamsbcbpviejbirvdwbcoenrsxeyayglvygsknetjuxdmbroritqklncrrnstwuaoohrqnypxfgbvfofsabnrgofobhdkktjyuhrxzmkrwglqczjlrfxbcbrplqmocjbphkjpdfxrfpkfrvlqqmiwftsuhxmjpimvngakbpkcvdevwubfqjjpnjmoiruabwxtzqehlwangolxjeqzjbfxltrgchuiginrgaeaztcqwacogzhvuhxcolvlzmoulikspebsdjyqlzgrtkqobkszfspnjftmnzsbyctxhkjsyemlehnqqhvvfdtrfarpjgaklmvbwukaykstipsnejnpnwaxskppilmgcdebupwpyyqajergevjizlhkiinvvqojrnegwbilhaktgjfkkefnwfbcxmjgylidekqjvgkxvnxdrttxzoyndupwqvlebbfgyiddgixrhcdbfkgakpausgcmjsgpqsjonrrrodgzzkworpvxzjzgtdbzqerqwjhjwoescduqwdwxsgmgyxolhlpdhuvscmzuuiynntspcyjlyngexpardjklyycnyquvbtfwlnqzxujowoljfvpnbyyoqiblxiiojiaeqpoblnrgttqmjqvqjaawevmbxgmbsaumjutbxhywkvhrljzcpitnunwfvtyiglqwlcngffnzdnfvccfqgdiazxoxgsepfqlyoxnofvogtsudsxarqpxhpidrwbykeypszgcbbyshljhlxperpdysagbmumexubjzimbetfayqfcbdkdpfsqrfuisioggdewxmeuihqitkwqfvmmwfppxfoigaoskgzedlhtmjagcbvwwnfkeyocceioccfjmxvtjutydhwapxkgdgjxuquujkdjtbkwhrlbxgclyqdcdnexitthelxajidteqtahvjwoovaripeuzycrdazmmzvpcecahnyentvdqdfteddmddcllromsumkucqhcgdelqsotimumwulnplxqeckhwgngafyvjlykdyotciabfkkdbqlfgwxkjreelzdswbaqzhiweqopubmxlestbvcrdimtiyleisfsvheanokuaipbniseceonfqlbrfofrhbuwtuirnrooflajdyrzwgcmpztgeyzrgvohijxrwrgsmfxagjuuygqtbyilneezxkmcbjoyhljlnesiuhgznvxzglpemwqfpixqlppehxeqzwhuxfcknkmpfttpywxaimzrsfalarhtoiwenlulyoadfdcyvsahzngcashxrchwrxknrvzhldforskqsktvltfuuxhummulwcfvezmfedfobzyfryrrzbypsvakkgppvhhicjydgzmnhcxsfqrdwgbmitmzzmjyzgvzusjigujfohqdlgmrtxmvvxoaxyfiyrgvfawk"
    "yynjykpmejzqmrpzitetxhqcjrqspglkzricplkrecaiumajvsobcddohclhmyinyteblkxqoqnoiyfwspencqszjbsnpyokusxgptshytgzqkynhncmqgrxaeugwyzxxqubfdkozfqrdeveopnhyvjcpjziogeyinlvcbhwltteivcnkfehvatnbvkfoqedljupaxholukhagwcasdgagdhpmkiumdclzstyexknhlojeoqkuejmnbuhdwgskgbqfomxvkgziun"
)


def main():
    """Patch a project's settings, container ids and tags, then print the result."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    patch = {
        "properties": {
            "settings": {"behaviorPreferences": _BEHAVIOR_PREFERENCES},
            "storageContainerIds": [
                "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/resourceGroups/rgdiscovery/providers/Microsoft.Discovery/storageContainers/storageContainer12"
            ],
        },
        "tags": {"key2596": "kpfaeffbnzkz"},
    }

    poller = client.projects.begin_update(
        resource_group_name="rgdiscovery",
        workspace_name="11e14e4bec2ea791b4",
        project_name="1e7dd7aa730b25cabf",
        properties=patch,
    )
    print(poller.result())  # wait for the long-running update to complete


# x-ms-original-file: 2026-02-01-preview/Projects_Update_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: StorageAssets_CreateOrUpdate (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Create (or update) a storage asset and print the finished resource."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    asset = {
        "location": "uksouth",
        "properties": {"description": "gwlk", "path": "qmvrklgqdif"},
        "tags": {"key5443": "dneh"},
    }

    poller = client.storage_assets.begin_create_or_update(
        resource_group_name="rgdiscovery",
        storage_container_name="106b8981ac9ca95890",
        storage_asset_name="8fd30c31448f7b0f1a",
        resource=asset,
    )
    print(poller.result())  # wait for the long-running create to complete


# x-ms-original-file: 2026-02-01-preview/StorageAssets_CreateOrUpdate_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: StorageAssets_Delete (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Delete a storage asset and wait for the operation to finish."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    poller = client.storage_assets.begin_delete(
        resource_group_name="rgdiscovery",
        storage_container_name="f7e7a03c675ccffe1a",
        storage_asset_name="8f741ee4588dc823fc",
    )
    poller.result()  # delete returns no body; this just waits for completion


# x-ms-original-file: 2026-02-01-preview/StorageAssets_Delete_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: StorageAssets_Get (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """Fetch a single storage asset and print it."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    asset = client.storage_assets.get(
        resource_group_name="rgdiscovery",
        storage_container_name="edde0a4a016d7d6b2b",
        storage_asset_name="5ea4bb40f40e2ef5e2",
    )
    print(asset)


# x-ms-original-file: 2026-02-01-preview/StorageAssets_Get_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
# Generated sample: StorageAssets_ListByStorageContainer (API version 2026-02-01-preview).
#
# Prerequisites:
#     pip install azure-identity azure-mgmt-discovery
# Set AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET in the
# environment so DefaultAzureCredential can authenticate.

from azure.identity import DefaultAzureCredential

from azure.mgmt.discovery import DiscoveryClient


def main():
    """List every storage asset in a storage container, printing each one."""
    credential = DefaultAzureCredential()
    client = DiscoveryClient(credential=credential, subscription_id="SUBSCRIPTION_ID")

    # The pager transparently fetches additional pages as it is iterated.
    for asset in client.storage_assets.list_by_storage_container(
        resource_group_name="rgdiscovery",
        storage_container_name="6b4fbcbb65873f18bf",
    ):
        print(asset)


# x-ms-original-file: 2026-02-01-preview/StorageAssets_ListByStorageContainer_MaximumSet_Gen.json
if __name__ == "__main__":
    main()
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_assets_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_assets.begin_update( + resource_group_name="rgdiscovery", + storage_container_name="d177d30241e3f8a27d", + storage_asset_name="6cd8920c03970ccdfe", + properties={"properties": {"description": "tljmqqr"}, "tags": {"key5822": "jicwkfdyoqvgpoy"}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageAssets_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..72e2f118f656 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py @@ -0,0 +1,53 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.begin_create_or_update( + resource_group_name="rgdiscovery", + storage_container_name="23ae33a54872c83164", + resource={ + "location": "uksouth", + "properties": { + "storageStore": { + "kind": "AzureStorageBlob", + "storageAccountId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/storageaccount", + } + }, + "tags": {"key9976": "waghigmzxlvfqwribpxamwx"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py new file mode 100644 index 000000000000..8c01e23707a7 --- /dev/null +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.storage_containers.begin_delete( + resource_group_name="rgdiscovery", + storage_container_name="861edbda8228e6d8c9", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py new file mode 100644 index 000000000000..36a70da711b4 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.get( + resource_group_name="rgdiscovery", + storage_container_name="8f3eba3d81d78de900", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..18c3830b3c46 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..adcd95999d69 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py new file mode 100644 index 000000000000..701ce91f8af7 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python storage_containers_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.storage_containers.begin_update( + resource_group_name="rgdiscovery", + storage_container_name="5c26ac8738c893ec11", + properties={"properties": {}, "tags": {"key5909": "hdhfnp"}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/StorageContainers_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..6dfc27a288aa --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py @@ -0,0 +1,65 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.begin_create_or_update( + resource_group_name="rgdiscovery", + supercomputer_name="85fd61f68e7207bbd3", + resource={ + "location": "uksouth", + "properties": { + "customerManagedKeys": "Enabled", + "diskEncryptionSetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Compute/diskEncryptionSets/diskencryptionset1", + "identities": { + "clusterIdentity": { + "id": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/managedid1" + }, + "kubeletIdentity": { + "id": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/managedid1" + }, + "workloadIdentities": {"key1149": {}}, + }, + "logAnalyticsClusterId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.OperationalInsights/clusters/cluster1", + "managementSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/managementSubnet1", + "outboundType": "LoadBalancer", + "subnetId": 
"/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/subnet1", + "systemSku": "Standard_D4s_v6", + }, + "tags": {"key5625": "spcjwrxnslfkiqbzdkhhbano"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py new file mode 100644 index 000000000000..406a01e13889 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.supercomputers.begin_delete( + resource_group_name="rgdiscovery", + supercomputer_name="44f7621cf75873fb53", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py new file mode 100644 index 000000000000..0342e2b93d69 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.get( + resource_group_name="rgdiscovery", + supercomputer_name="b6807d2513b2fdb240", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..c30b2f770dd1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..3887643ce905 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py new file mode 100644 index 000000000000..cff377e47d05 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python supercomputers_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.supercomputers.begin_update( + resource_group_name="rgdiscovery", + supercomputer_name="a60016ec51d9d8e35d", + properties={ + "properties": {"identities": {"workloadIdentities": {"key3032": {}}}}, + "tags": {"key9318": "xawwf"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Supercomputers_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..24c122430d83 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py @@ -0,0 +1,88 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.begin_create_or_update( + resource_group_name="rgdiscovery", + tool_name="b5d4239f788c20b58b", + resource={ + "location": "uksouth", + "properties": { + "definitionContent": { + "actions": [ + { + "command": "python3 submit_dft.py ", + "description": "Optimize geometry of 'xyz's from the input data asset. This is a prerequisite for all other discovery computations.", + "environment_variables": [ + {"name": "OUTPUT_DIRECTORY_PATH", "value": "{{ outputDataAssetId }}"} + ], + "input_schema": { + "properties": { + "basisSet": { + "description": "Basis set. 
Must be one of the supported basis sets (e.g., def2-svp, def2-tzvp).", + "type": "string", + }, + "inputDataAssetId": { + "description": "Identifier of the input data asset", + "type": "string", + }, + "outputDataAssetId": { + "description": "Identifier to use for the new output data asset which will be created.", + "type": "string", + }, + "xyzColumnName": { + "description": "Column containing xyz data within the input data table asset", + "type": "string", + }, + }, + "required": ["inputDataAssetId", "xyzColumnName"], + "type": "object", + }, + "name": "GeometryOptimization", + } + ], + "description": "Advanced DFT computational tools for molecular geometry optimization and property calculations", + "name": "discovery", + "tool_id": "discovery-m1", + }, + "environmentVariables": {"key5460": "xtjzjghbist"}, + "version": "sjepxewtq", + }, + "tags": {"key2611": "cgsblxvyzevbd"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Tools_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py new file mode 100644 index 000000000000..31c411792c4d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.tools.begin_delete( + resource_group_name="rgdiscovery", + tool_name="d0e8e07484db1bb9a9", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Tools_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py new file mode 100644 index 000000000000..3fd5e3176f41 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.get( + resource_group_name="rgdiscovery", + tool_name="30ebfda6785888d26f", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Tools_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..90da28f92a56 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Tools_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..a428d058d2e5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Tools_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py new file mode 100644 index 000000000000..0bdd0a8002b9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py @@ -0,0 +1,87 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python tools_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.tools.begin_update( + resource_group_name="rgdiscovery", + tool_name="f1972d0fc9531d424c", + properties={ + "properties": { + "definitionContent": { + "actions": [ + { + "command": "python3 submit_dft.py ", + "description": "Optimize geometry of 'xyz's from the input data asset. This is a prerequisite for all other discovery computations.", + "environment_variables": [ + {"name": "OUTPUT_DIRECTORY_PATH", "value": "{{ outputDataAssetId }}"} + ], + "input_schema": { + "properties": { + "basisSet": { + "description": "Basis set. 
Must be one of the supported basis sets (e.g., def2-svp, def2-tzvp).", + "type": "string", + }, + "inputDataAssetId": { + "description": "Identifier of the input data asset", + "type": "string", + }, + "outputDataAssetId": { + "description": "Identifier to use for the new output data asset which will be created.", + "type": "string", + }, + "xyzColumnName": { + "description": "Column containing xyz data within the input data table asset", + "type": "string", + }, + }, + "required": ["inputDataAssetId", "xyzColumnName"], + "type": "object", + }, + "name": "GeometryOptimization", + } + ], + "description": "Advanced DFT computational tools for molecular geometry optimization and property calculations", + "name": "discovery", + "tool_id": "discovery-m1", + }, + "environmentVariables": {"key3840": "snaxrtkryhwqw"}, + "version": "jittnzvso", + }, + "tags": {"key4187": "vbirsnehukndlpioqtsmqyoqhklg"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Tools_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..92fd9fc0afcd --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_endpoint_connections.begin_create_or_update( + resource_group_name="rgdiscovery", + workspace_name="2b2ef1dfc273d99493", + private_endpoint_connection_name="connection", + resource={ + "properties": { + "privateEndpoint": {}, + "privateLinkServiceConnectionState": { + "actionsRequired": "vgqhrxvmviabfgmafqtbej", + "description": "lknyprq", + "status": "Pending", + }, + } + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateEndpointConnections_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py new file mode 100644 index 000000000000..49c576e042da --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_endpoint_connections_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.workspace_private_endpoint_connections.begin_delete( + resource_group_name="rgdiscovery", + workspace_name="1e2a3df721db9f3406", + private_endpoint_connection_name="connection", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateEndpointConnections_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py new file mode 100644 index 000000000000..eecd6bb9f1ac --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py @@ 
-0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_endpoint_connections_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_endpoint_connections.get( + resource_group_name="rgdiscovery", + workspace_name="16e7096454e0394819", + private_endpoint_connection_name="connection", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateEndpointConnections_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py new file mode 100644 index 000000000000..4078f65004ba --- /dev/null +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_endpoint_connections.list_by_workspace( + resource_group_name="rgdiscovery", + workspace_name="cc28db0ff1bebbe39b", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateEndpointConnections_ListByWorkspace_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py new file mode 100644 index 000000000000..3e318f0033ec --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_link_resources_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_link_resources.get( + resource_group_name="rgdiscovery", + workspace_name="68b05b24fa2cc1a943", + private_link_resource_name="connection", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateLinkResources_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py new file mode 100644 index 000000000000..798ab14432f5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspace_private_link_resources_list_by_workspace_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspace_private_link_resources.list_by_workspace( + resource_group_name="rgdiscovery", + workspace_name="3a737dc9780bdefdff", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/WorkspacePrivateLinkResources_ListByWorkspace_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..04c6770373e3 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py @@ -0,0 +1,66 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.begin_create_or_update( + resource_group_name="rgdiscovery", + workspace_name="b8d58cd85996a6dea3", + resource={ + "location": "uksouth", + "properties": { + "agentSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/agentSubnet1", + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyName": "yfplarzdfwsut", + "keyVaultUri": "https://microsoft.com/a", + "keyVersion": "qlsjcf", + }, + "logAnalyticsClusterId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.OperationalInsights/clusters/cluster1", + "privateEndpointSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/privateEndpointSubnet1", + "publicNetworkAccess": "Enabled", + "supercomputerIds": [ + "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/resourceGroups/rgdiscovery/providers/Microsoft.Discovery/supercomputers/supercomputer12" + ], 
+ "workspaceIdentity": { + "id": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.ManagedIdentity/userAssignedIdentities/managedid1" + }, + "workspaceSubnetId": "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/providers/Microsoft.Network/virtualNetworks/virtualnetwork1/subnets/workspaceSubnet1", + }, + "tags": {"key5364": "xiwdefebkbfffgqlzmqaeqsqeq"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py new file mode 100644 index 000000000000..987e9602cafb --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.workspaces.begin_delete( + resource_group_name="rgdiscovery", + workspace_name="f1559ab1ef72a2eae5", + ).result() + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py new file mode 100644 index 000000000000..8468a78b7fab --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.get( + resource_group_name="rgdiscovery", + workspace_name="0e6a06e55e7efe8f07", + ) + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py new file mode 100644 index 000000000000..3a5c9863a67b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_list_by_resource_group_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.list_by_resource_group( + resource_group_name="rgdiscovery", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_ListByResourceGroup_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..3d4beb74251a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py new file mode 100644 index 000000000000..6dfc35655023 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py @@ -0,0 +1,56 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.discovery import DiscoveryClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-discovery +# USAGE + python workspaces_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DiscoveryClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.workspaces.begin_update( + resource_group_name="rgdiscovery", + workspace_name="43ac331aecf462b646", + properties={ + "properties": { + "keyVaultProperties": { + "keyName": "oxxinrlglrdihfqjrpkjc", + "keyVersion": "xbvilcphokrwachseulvwywaekfh", + }, + "publicNetworkAccess": "Enabled", + "supercomputerIds": [ + "/subscriptions/31735C59-6307-4464-8B80-3675223F23D2/resourceGroups/rgdiscovery/providers/Microsoft.Discovery/supercomputers/supercomputer12" + ], + }, + "tags": {"key6612": "ca"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-02-01-preview/Workspaces_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py new file mode 100644 index 000000000000..7ba6ee889813 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# For security, please avoid record sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + discovery_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + discovery_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + discovery_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + discovery_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=discovery_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=discovery_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=discovery_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=discovery_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..740087be3f1b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations.py @@ -0,0 +1,89 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryBookshelfPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_endpoint_connections_get(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_endpoint_connections_begin_create_or_update(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_endpoint_connections_begin_delete(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_endpoint_connections_list_by_bookshelf(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.list_by_bookshelf( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations_async.py new file mode 100644 index 000000000000..3354abbfdb64 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations_async.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryBookshelfPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_endpoint_connections_get(self, resource_group): + response = await self.client.bookshelf_private_endpoint_connections.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_endpoint_connections_begin_create_or_update(self, resource_group): + response = await ( + await self.client.bookshelf_private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_endpoint_connections_begin_delete(self, resource_group): + response = await ( + await self.client.bookshelf_private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_endpoint_connection_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_endpoint_connections_list_by_bookshelf(self, resource_group): + response = self.client.bookshelf_private_endpoint_connections.list_by_bookshelf( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations.py new file mode 100644 index 000000000000..f4466a757f47 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryBookshelfPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_link_resources_get(self, resource_group): + response = self.client.bookshelf_private_link_resources.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelf_private_link_resources_list_by_bookshelf(self, resource_group): + response = self.client.bookshelf_private_link_resources.list_by_bookshelf( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations_async.py new file mode 100644 index 000000000000..95007b53571b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations_async.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryBookshelfPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_link_resources_get(self, resource_group): + response = await self.client.bookshelf_private_link_resources.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelf_private_link_resources_list_by_bookshelf(self, resource_group): + response = self.client.bookshelf_private_link_resources.list_by_bookshelf( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations.py new file mode 100644 index 000000000000..45a22b2f09d1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations.py @@ -0,0 +1,197 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryBookshelvesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_get(self, resource_group): + response = self.client.bookshelves.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_begin_create_or_update(self, resource_group): + response = self.client.bookshelves.begin_create_or_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "bookshelfUri": "str", + "customerManagedKeys": "str", + "keyVaultProperties": { + "identityClientId": "str", + "keyName": "str", + "keyVaultUri": "str", + "keyVersion": "str", + }, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "searchSubnetId": "str", + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_begin_update(self, resource_group): + response = self.client.bookshelves.begin_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "bookshelfUri": "str", + "customerManagedKeys": "str", + "keyVaultProperties": { + "identityClientId": "str", + "keyName": "str", + "keyVaultUri": "str", + "keyVersion": "str", + }, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "searchSubnetId": "str", + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_begin_delete(self, resource_group): + response = self.client.bookshelves.begin_delete( + resource_group_name=resource_group.name, + bookshelf_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_list_by_resource_group(self, resource_group): + response = self.client.bookshelves.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_bookshelves_list_by_subscription(self, resource_group): + response = self.client.bookshelves.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations_async.py new file mode 100644 index 000000000000..17e9e09b7c43 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations_async.py @@ -0,0 +1,204 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryBookshelvesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_get(self, resource_group): + response = await self.client.bookshelves.get( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_begin_create_or_update(self, resource_group): + response = await ( + await self.client.bookshelves.begin_create_or_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "bookshelfUri": "str", + "customerManagedKeys": "str", + "keyVaultProperties": { + "identityClientId": "str", + "keyName": "str", + "keyVaultUri": "str", + "keyVersion": "str", + }, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 
00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "searchSubnetId": "str", + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_begin_update(self, resource_group): + response = await ( + await self.client.bookshelves.begin_update( + resource_group_name=resource_group.name, + bookshelf_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "bookshelfUri": "str", + "customerManagedKeys": "str", + "keyVaultProperties": { + "identityClientId": "str", + "keyName": "str", + "keyVaultUri": "str", + "keyVersion": "str", + }, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", 
+ }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "searchSubnetId": "str", + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_begin_delete(self, resource_group): + response = await ( + await self.client.bookshelves.begin_delete( + resource_group_name=resource_group.name, + bookshelf_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_list_by_resource_group(self, resource_group): + response = self.client.bookshelves.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_bookshelves_list_by_subscription(self, resource_group): + response = self.client.bookshelves.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations.py new file mode 100644 index 000000000000..7f3332215ea0 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryChatModelDeploymentsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_get(self, resource_group): + response = self.client.chat_model_deployments.get( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_begin_create_or_update(self, resource_group): + response = self.client.chat_model_deployments.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"modelFormat": "str", "modelName": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_begin_update(self, resource_group): + response = self.client.chat_model_deployments.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"modelFormat": "str", "modelName": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_begin_delete(self, resource_group): + response = self.client.chat_model_deployments.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_chat_model_deployments_list_by_workspace(self, resource_group): + response = self.client.chat_model_deployments.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations_async.py new file mode 100644 index 000000000000..8d27d002cc8f --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations_async.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryChatModelDeploymentsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_get(self, resource_group): + response = await self.client.chat_model_deployments.get( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_begin_create_or_update(self, resource_group): + response = await ( + await self.client.chat_model_deployments.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"modelFormat": "str", "modelName": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_begin_update(self, resource_group): + response = await ( + await self.client.chat_model_deployments.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"modelFormat": "str", "modelName": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_begin_delete(self, resource_group): + response = await ( + await self.client.chat_model_deployments.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + chat_model_deployment_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_chat_model_deployments_list_by_workspace(self, resource_group): + response = self.client.chat_model_deployments.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations.py new file mode 100644 index 000000000000..0f68af30f8c9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryNodePoolsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_get(self, resource_group): + response = self.client.node_pools.get( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_begin_create_or_update(self, resource_group): + response = self.client.node_pools.begin_create_or_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "maxNodeCount": 0, + "subnetId": "str", + "vmSize": "str", + "minNodeCount": 0, + "provisioningState": "str", + "scaleSetPriority": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_begin_update(self, resource_group): + response = self.client.node_pools.begin_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "maxNodeCount": 0, + "subnetId": "str", + "vmSize": "str", + "minNodeCount": 0, + "provisioningState": "str", + "scaleSetPriority": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_begin_delete(self, resource_group): + response = self.client.node_pools.begin_delete( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_node_pools_list_by_supercomputer(self, resource_group): + response = self.client.node_pools.list_by_supercomputer( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations_async.py new file mode 100644 index 000000000000..f7b6f5384a68 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations_async.py @@ -0,0 +1,131 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryNodePoolsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_get(self, resource_group): + response = await self.client.node_pools.get( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_begin_create_or_update(self, resource_group): + response = await ( + await self.client.node_pools.begin_create_or_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "maxNodeCount": 0, + "subnetId": "str", + "vmSize": "str", + "minNodeCount": 0, + "provisioningState": "str", + "scaleSetPriority": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_begin_update(self, resource_group): + response = await ( + await self.client.node_pools.begin_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "maxNodeCount": 0, + "subnetId": "str", + "vmSize": "str", + "minNodeCount": 0, + "provisioningState": "str", + "scaleSetPriority": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_begin_delete(self, resource_group): + response = await ( + await self.client.node_pools.begin_delete( + resource_group_name=resource_group.name, + supercomputer_name="str", + node_pool_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_node_pools_list_by_supercomputer(self, resource_group): + response = self.client.node_pools.list_by_supercomputer( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations.py new file mode 100644 index 000000000000..2190847347aa --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_operations_list(self, resource_group): + response = self.client.operations.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations_async.py new file mode 100644 index 000000000000..4d76e32a16dc --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations_async.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_operations_list(self, resource_group): + response = self.client.operations.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations.py new file mode 100644 index 000000000000..5571d5c4f8c5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryProjectsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_get(self, resource_group): + response = self.client.projects.get( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_begin_create_or_update(self, resource_group): + response = self.client.projects.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "foundryProjectEndpoint": "str", + "provisioningState": "str", + "settings": {"behaviorPreferences": "str"}, + "storageContainerIds": ["str"], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_begin_update(self, resource_group): + response = self.client.projects.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "foundryProjectEndpoint": "str", + "provisioningState": "str", + "settings": {"behaviorPreferences": "str"}, + "storageContainerIds": ["str"], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_begin_delete(self, resource_group): + response = self.client.projects.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_projects_list_by_workspace(self, resource_group): + response = self.client.projects.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations_async.py new file mode 100644 index 000000000000..43b0c90c0639 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations_async.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryProjectsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_get(self, resource_group): + response = await self.client.projects.get( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_begin_create_or_update(self, resource_group): + response = await ( + await self.client.projects.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "foundryProjectEndpoint": "str", + "provisioningState": "str", + "settings": {"behaviorPreferences": "str"}, + "storageContainerIds": ["str"], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_begin_update(self, resource_group): + response = await ( + await self.client.projects.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "foundryProjectEndpoint": "str", + "provisioningState": "str", + "settings": {"behaviorPreferences": "str"}, + "storageContainerIds": ["str"], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_begin_delete(self, resource_group): + response = await ( + await self.client.projects.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + project_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_projects_list_by_workspace(self, resource_group): + response = self.client.projects.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations.py new file mode 100644 index 000000000000..ff7993dc6c6a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryStorageAssetsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_get(self, resource_group): + response = self.client.storage_assets.get( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_begin_create_or_update(self, resource_group): + response = self.client.storage_assets.begin_create_or_update( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"description": "str", "path": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_begin_update(self, resource_group): + response = self.client.storage_assets.begin_update( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"description": "str", "path": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_begin_delete(self, resource_group): + response = self.client.storage_assets.begin_delete( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_assets_list_by_storage_container(self, resource_group): + response = self.client.storage_assets.list_by_storage_container( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations_async.py new file mode 100644 index 000000000000..d7ba3657d591 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations_async.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryStorageAssetsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_get(self, resource_group): + response = await self.client.storage_assets.get( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_begin_create_or_update(self, resource_group): + response = await ( + await self.client.storage_assets.begin_create_or_update( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"description": "str", "path": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_begin_update(self, resource_group): + response = await ( + await self.client.storage_assets.begin_update( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"description": "str", "path": "str", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_begin_delete(self, resource_group): + response = await ( + await self.client.storage_assets.begin_delete( + resource_group_name=resource_group.name, + storage_container_name="str", + storage_asset_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_assets_list_by_storage_container(self, resource_group): + response = self.client.storage_assets.list_by_storage_container( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations.py new file mode 100644 index 000000000000..2e46246149a9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryStorageContainersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_get(self, resource_group): + response = self.client.storage_containers.get( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_begin_create_or_update(self, resource_group): + response = self.client.storage_containers.begin_create_or_update( + resource_group_name=resource_group.name, + storage_container_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"storageStore": "storage_store", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_begin_update(self, resource_group): + response = self.client.storage_containers.begin_update( + resource_group_name=resource_group.name, + storage_container_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"storageStore": "storage_store", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_begin_delete(self, resource_group): + response = self.client.storage_containers.begin_delete( + resource_group_name=resource_group.name, + storage_container_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_list_by_resource_group(self, resource_group): + response = self.client.storage_containers.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_storage_containers_list_by_subscription(self, resource_group): + response = self.client.storage_containers.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations_async.py new file mode 100644 index 000000000000..d35efe6ad6c5 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations_async.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryStorageContainersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_get(self, resource_group): + response = await self.client.storage_containers.get( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_begin_create_or_update(self, resource_group): + response = await ( + await self.client.storage_containers.begin_create_or_update( + resource_group_name=resource_group.name, + storage_container_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"storageStore": "storage_store", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_begin_update(self, resource_group): + response = await ( + await self.client.storage_containers.begin_update( + resource_group_name=resource_group.name, + storage_container_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": {"storageStore": "storage_store", "provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_begin_delete(self, resource_group): + response = await ( + await self.client.storage_containers.begin_delete( + resource_group_name=resource_group.name, + storage_container_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_list_by_resource_group(self, resource_group): + response = self.client.storage_containers.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_storage_containers_list_by_subscription(self, resource_group): + response = self.client.storage_containers.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations.py new file mode 100644 index 000000000000..4032de062875 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations.py @@ -0,0 +1,145 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoverySupercomputersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_get(self, resource_group): + response = self.client.supercomputers.get( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_begin_create_or_update(self, resource_group): + response = self.client.supercomputers.begin_create_or_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "identities": { + "clusterIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "kubeletIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "subnetId": "str", + "customerManagedKeys": "str", + "diskEncryptionSetId": "str", + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "managementSubnetId": "str", + "outboundType": "str", + "provisioningState": "str", + "systemSku": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_begin_update(self, resource_group): + response = self.client.supercomputers.begin_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "identities": { + "clusterIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "kubeletIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "subnetId": "str", + "customerManagedKeys": "str", + "diskEncryptionSetId": "str", + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "managementSubnetId": "str", + "outboundType": "str", + "provisioningState": "str", + "systemSku": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_begin_delete(self, resource_group): + response = self.client.supercomputers.begin_delete( + resource_group_name=resource_group.name, + supercomputer_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_list_by_resource_group(self, resource_group): + response = self.client.supercomputers.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_supercomputers_list_by_subscription(self, resource_group): + response = self.client.supercomputers.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations_async.py new file mode 100644 index 000000000000..d3bacbfcf71a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations_async.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoverySupercomputersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_get(self, resource_group): + response = await self.client.supercomputers.get( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_begin_create_or_update(self, resource_group): + response = await ( + await self.client.supercomputers.begin_create_or_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "identities": { + "clusterIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "kubeletIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "subnetId": "str", + "customerManagedKeys": "str", + "diskEncryptionSetId": "str", + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "managementSubnetId": "str", + "outboundType": "str", + "provisioningState": "str", + "systemSku": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + 
"createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_begin_update(self, resource_group): + response = await ( + await self.client.supercomputers.begin_update( + resource_group_name=resource_group.name, + supercomputer_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "identities": { + "clusterIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "kubeletIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "workloadIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "subnetId": "str", + "customerManagedKeys": "str", + "diskEncryptionSetId": "str", + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "managementSubnetId": "str", + "outboundType": "str", + "provisioningState": "str", + "systemSku": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_begin_delete(self, resource_group): + response = await ( + await self.client.supercomputers.begin_delete( + resource_group_name=resource_group.name, + supercomputer_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_list_by_resource_group(self, resource_group): + response = self.client.supercomputers.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_supercomputers_list_by_subscription(self, resource_group): + response = self.client.supercomputers.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations.py new file mode 100644 index 000000000000..7e4ff1279741 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations.py @@ -0,0 +1,123 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryToolsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_get(self, resource_group): + response = self.client.tools.get( + resource_group_name=resource_group.name, + tool_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_begin_create_or_update(self, resource_group): + response = self.client.tools.begin_create_or_update( + resource_group_name=resource_group.name, + tool_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "definitionContent": {"str": {}}, + "version": "str", + "environmentVariables": {"str": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_begin_update(self, resource_group): + response = self.client.tools.begin_update( + resource_group_name=resource_group.name, + tool_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "definitionContent": {"str": {}}, + "version": "str", + "environmentVariables": {"str": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_begin_delete(self, resource_group): + response = self.client.tools.begin_delete( + resource_group_name=resource_group.name, + tool_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_list_by_resource_group(self, resource_group): + response = self.client.tools.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_tools_list_by_subscription(self, resource_group): + response = self.client.tools.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations_async.py new file mode 100644 index 000000000000..2617d3497b56 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations_async.py @@ -0,0 +1,130 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryToolsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_get(self, resource_group): + response = await self.client.tools.get( + resource_group_name=resource_group.name, + tool_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_begin_create_or_update(self, resource_group): + response = await ( + await self.client.tools.begin_create_or_update( + resource_group_name=resource_group.name, + tool_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "definitionContent": {"str": {}}, + "version": "str", + "environmentVariables": {"str": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_begin_update(self, resource_group): + response = await ( + await self.client.tools.begin_update( + resource_group_name=resource_group.name, + tool_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "definitionContent": {"str": {}}, + "version": "str", + "environmentVariables": {"str": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_begin_delete(self, resource_group): + response = await ( + await self.client.tools.begin_delete( + resource_group_name=resource_group.name, + tool_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_list_by_resource_group(self, resource_group): + response = self.client.tools.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_tools_list_by_subscription(self, resource_group): + response = self.client.tools.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..160ed09ba273 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations.py @@ -0,0 +1,89 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryWorkspacePrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_endpoint_connections_get(self, resource_group): + response = self.client.workspace_private_endpoint_connections.get( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_endpoint_connections_begin_create_or_update(self, resource_group): + response = self.client.workspace_private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_endpoint_connections_begin_delete(self, resource_group): + response = self.client.workspace_private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_endpoint_connections_list_by_workspace(self, resource_group): + response = self.client.workspace_private_endpoint_connections.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations_async.py new file mode 100644 index 000000000000..2402290128ff --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations_async.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryWorkspacePrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_endpoint_connections_get(self, resource_group): + response = await self.client.workspace_private_endpoint_connections.get( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_endpoint_connections_begin_create_or_update(self, resource_group): + response = await ( + await self.client.workspace_private_endpoint_connections.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_endpoint_connections_begin_delete(self, resource_group): + response = await ( + await self.client.workspace_private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + private_endpoint_connection_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_endpoint_connections_list_by_workspace(self, resource_group): + response = self.client.workspace_private_endpoint_connections.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations.py new file mode 100644 index 000000000000..672f2facc64c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryWorkspacePrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_link_resources_get(self, resource_group): + response = self.client.workspace_private_link_resources.get( + resource_group_name=resource_group.name, + workspace_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspace_private_link_resources_list_by_workspace(self, resource_group): + response = self.client.workspace_private_link_resources.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations_async.py new file mode 100644 index 000000000000..2275cb83c63c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations_async.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryWorkspacePrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_link_resources_get(self, resource_group): + response = await self.client.workspace_private_link_resources.get( + resource_group_name=resource_group.name, + workspace_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspace_private_link_resources_list_by_workspace(self, resource_group): + response = self.client.workspace_private_link_resources.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations.py new file mode 100644 index 000000000000..bccd1dfdde01 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations.py @@ -0,0 +1,193 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryWorkspacesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_get(self, resource_group): + response = self.client.workspaces.get( + resource_group_name=resource_group.name, + workspace_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_create_or_update(self, resource_group): + response = self.client.workspaces.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "workspaceIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "agentSubnetId": "str", + "customerManagedKeys": "str", + "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"}, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "supercomputerIds": ["str"], + "workspaceApiUri": "str", + "workspaceSubnetId": "str", + "workspaceUiUri": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_update(self, resource_group): + response = self.client.workspaces.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "workspaceIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "agentSubnetId": "str", + "customerManagedKeys": "str", + "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"}, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "supercomputerIds": ["str"], + "workspaceApiUri": "str", + "workspaceSubnetId": "str", + "workspaceUiUri": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_delete(self, resource_group): + response = self.client.workspaces.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_list_by_resource_group(self, resource_group): + response = self.client.workspaces.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_list_by_subscription(self, resource_group): + response = self.client.workspaces.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations_async.py new file mode 100644 index 000000000000..594aa5967503 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations_async.py @@ -0,0 +1,200 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.discovery.aio import DiscoveryClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDiscoveryWorkspacesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_get(self, resource_group): + response = await self.client.workspaces.get( + resource_group_name=resource_group.name, + workspace_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_create_or_update(self, resource_group): + response = await ( + await self.client.workspaces.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "workspaceIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "agentSubnetId": "str", + "customerManagedKeys": "str", + "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"}, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + 
"createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "supercomputerIds": ["str"], + "workspaceApiUri": "str", + "workspaceSubnetId": "str", + "workspaceUiUri": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_update(self, resource_group): + response = await ( + await self.client.workspaces.begin_update( + resource_group_name=resource_group.name, + workspace_name="str", + properties={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "workspaceIdentity": {"id": "str", "clientId": "str", "principalId": "str"}, + "agentSubnetId": "str", + "customerManagedKeys": "str", + "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"}, + "logAnalyticsClusterId": "str", + "managedOnBehalfOfConfiguration": {"moboBrokerResources": [{"id": "str"}]}, + "managedResourceGroup": "str", + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": 
"2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "privateEndpointSubnetId": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "supercomputerIds": ["str"], + "workspaceApiUri": "str", + "workspaceSubnetId": "str", + "workspaceUiUri": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_delete(self, resource_group): + response = await ( + await self.client.workspaces.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_list_by_resource_group(self, resource_group): + response = self.client.workspaces.list_by_resource_group( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_list_by_subscription(self, resource_group): + response = self.client.workspaces.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/discovery/azure-mgmt-discovery/pyproject.toml b/sdk/discovery/azure-mgmt-discovery/pyproject.toml new file mode 100644 index 000000000000..bddd92973a29 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/pyproject.toml @@ -0,0 +1,86 @@ +[build-system] +requires = [ + "setuptools>=77.0.3", + "wheel", +] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-mgmt-discovery" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Azure Discovery Management Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = [ + "azure", + "azure sdk", +] +dependencies = [ + "isodate>=0.6.1", + "azure-mgmt-core>=1.6.0", + "typing-extensions>=4.6.0", +] +dynamic = [ + "version", + "readme", +] + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic.version] +attr = "azure.mgmt.discovery._version.VERSION" + +[tool.setuptools.dynamic.readme] +file = [ + "README.md", + "CHANGELOG.md", +] +content-type = "text/markdown" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + "azure.mgmt", +] + +[tool.setuptools.package-data] +pytyped = [ + "py.typed", +] + +[tool.azure-sdk-build] +breaking = false +pyright = false +mypy = false + +[packaging] +package_name = "azure-mgmt-discovery" +package_nspkg = "azure-mgmt-nspkg" +package_pprint_name = "Discovery Management" +package_doc_id = "" +is_stable = false +is_arm = true +need_msrestazure = 
false +need_azuremgmtcore = true +sample_link = "" +exclude_folders = "" +title = "DiscoveryClient" diff --git a/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml b/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml new file mode 100644 index 000000000000..7a15b77623fb --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml @@ -0,0 +1,8 @@ +directory: specification/discovery/Discovery.Management +commit: 74cc90c49189a079b3cc93fde9c9ad76742f0184 +repo: Azure/azure-rest-api-specs +additionalDirectories: +- specification/discovery/Discovery.Management.Shared +- specification/discovery/Discovery.Bookshelf.Management +- specification/discovery/Discovery.Supercomputer.Management +- specification/discovery/Discovery.Workspace.Management diff --git a/sdk/discovery/ci.yml b/sdk/discovery/ci.yml new file mode 100644 index 000000000000..148300d84180 --- /dev/null +++ b/sdk/discovery/ci.yml @@ -0,0 +1,34 @@ +# DO NOT EDIT THIS FILE +# This file is generated automatically and any changes will be lost. 
+ +trigger: + branches: + include: + - main + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/discovery/ + +pr: + branches: + include: + - main + - feature/* + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/discovery/ + +extends: + template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml + parameters: + ServiceDirectory: discovery + TestProxy: true + Artifacts: + - name: azure-mgmt-discovery + safeName: azuremgmtdiscovery From 826ed4c92165ecd699c27770d1a0c2a0acb8acb5 Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Sat, 7 Mar 2026 16:11:56 +0000 Subject: [PATCH 2/9] Configurations: 'specification/discovery/Discovery.Management/tspconfig.yaml', API Version: 2026-02-01-preview, SDK Release Type: beta, and CommitSHA: '74cc90c49189a079b3cc93fde9c9ad76742f0184' in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs' Pipeline run: https://dev.azure.com/azure-sdk/internal/_build/results?buildId=5977415 Refer to https://eng.ms/docs/products/azure-developer-experience/develop/sdk-release/sdk-release-prerequisites to prepare for SDK release. 
--- sdk/discovery/azure-mgmt-discovery/CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md b/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md index 04110fc84c4b..b1ff2b1ec490 100644 --- a/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md +++ b/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md @@ -1,6 +1,6 @@ # Release History -## 1.0.0b1 (2026-03-06) +## 1.0.0b1 (2026-03-07) ### Other Changes From c6fbf9f32a88ba5170fc55732ee7014681f8e145 Mon Sep 17 00:00:00 2001 From: Oluwaseyi Lawal Date: Sat, 7 Mar 2026 11:29:41 -0600 Subject: [PATCH 3/9] Add live tests, test recordings, and CODEOWNERS entry from discovery SDK development --- .github/CODEOWNERS | 6 +- .../azure-mgmt-discovery/tests/__init__.py | 4 + .../azure-mgmt-discovery/tests/conftest.py | 34 + ...yTestBookshelvestest_create_bookshelf.json | 70 + ...yTestBookshelvestest_delete_bookshelf.json | 32 + ...s.pyTestBookshelvestest_get_bookshelf.json | 62 + ...st_list_bookshelves_by_resource_group.json | 184 ++ ...test_list_bookshelves_by_subscription.json | 264 +++ ...yTestBookshelvestest_update_bookshelf.json | 72 + ...entstest_create_chat_model_deployment.json | 60 + ...entstest_delete_chat_model_deployment.json | 32 + ...oymentstest_get_chat_model_deployment.json | 52 + ...t_chat_model_deployments_by_workspace.json | 54 + ...test_list_node_pools_by_supercomputer.json | 66 + ....pyTestOperationstest_list_operations.json | 1501 ++++++++++++ ...ojectstest_list_projects_by_workspace.json | 37 + ...torageAssetstest_create_storage_asset.json | 66 + ...torageAssetstest_delete_storage_asset.json | 38 + ...stStorageAssetstest_get_storage_asset.json | 54 + ...t_storage_assets_by_storage_container.json | 64 + ...torageAssetstest_update_storage_asset.json | 66 + ...ntainerstest_create_storage_container.json | 70 + ...ntainerstest_delete_storage_container.json | 32 + ...eContainerstest_get_storage_container.json | 56 + 
..._storage_containers_by_resource_group.json | 66 + ...st_storage_containers_by_subscription.json | 85 + ...ntainerstest_update_storage_container.json | 68 + ...percomputerstest_create_supercomputer.json | 100 + ...percomputerstest_delete_supercomputer.json | 38 + ...tSupercomputerstest_get_supercomputer.json | 78 + ...list_supercomputers_by_resource_group.json | 88 + ...t_list_supercomputers_by_subscription.json | 268 +++ ...est_tools.pyTestToolstest_create_tool.json | 175 ++ ...est_tools.pyTestToolstest_delete_tool.json | 38 + .../test_tools.pyTestToolstest_get_tool.json | 109 + ...oolstest_list_tools_by_resource_group.json | 119 + ...tToolstest_list_tools_by_subscription.json | 2057 +++++++++++++++++ ...est_tools.pyTestToolstest_update_tool.json | 121 + ...pyTestWorkspacestest_create_workspace.json | 97 + ...pyTestWorkspacestest_delete_workspace.json | 32 + ...es.pyTestWorkspacestest_get_workspace.json | 76 + ...est_list_workspaces_by_resource_group.json | 80 + ...stest_list_workspaces_by_subscription.json | 366 +++ ...pyTestWorkspacestest_update_workspace.json | 84 + .../tests/test_bookshelves.py | 69 + .../tests/test_chat_model_deployments.py | 64 + .../tests/test_hero_scenario.py | 364 +++ .../tests/test_node_pools.py | 86 + .../tests/test_operations.py | 24 + .../tests/test_private_endpoints.py | 170 ++ .../tests/test_projects.py | 78 + .../tests/test_storage_assets.py | 78 + .../tests/test_storage_containers.py | 79 + .../tests/test_supercomputers.py | 84 + .../azure-mgmt-discovery/tests/test_tools.py | 132 ++ .../tests/test_unit_client.py | 51 + .../tests/test_unit_models.py | 92 + .../tests/test_workspaces.py | 105 + .../azure-mgmt-discovery/tests/testcase.py | 37 + 59 files changed, 8632 insertions(+), 2 deletions(-) create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/__init__.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/conftest.py create mode 100644 
sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_create_bookshelf.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_delete_bookshelf.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_get_bookshelf.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_resource_group.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_subscription.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_update_bookshelf.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_create_chat_model_deployment.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_delete_chat_model_deployment.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_get_chat_model_deployment.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_list_chat_model_deployments_by_workspace.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_node_pools.pyTestNodePoolstest_list_node_pools_by_supercomputer.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_operations.pyTestOperationstest_list_operations.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_projects.pyTestProjectstest_list_projects_by_workspace.json create mode 100644 
sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_create_storage_asset.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_delete_storage_asset.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_get_storage_asset.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_list_storage_assets_by_storage_container.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_update_storage_asset.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_create_storage_container.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_delete_storage_container.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_get_storage_container.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_resource_group.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_subscription.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_update_storage_container.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_create_supercomputer.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_delete_supercomputer.json create mode 100644 
sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_get_supercomputer.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_resource_group.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_subscription.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_create_tool.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_delete_tool.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_get_tool.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_resource_group.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_subscription.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_update_tool.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_create_workspace.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_delete_workspace.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_get_workspace.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_resource_group.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_subscription.json create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_update_workspace.json create mode 100644 
sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_hero_scenario.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_operations.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_projects.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_tools.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_unit_client.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_unit_models.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py create mode 100644 sdk/discovery/azure-mgmt-discovery/tests/testcase.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9cba65a6de38..754a3a0056ef 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -237,6 +237,10 @@ # PRLabel: %Digital Twins /sdk/digitaltwins/ @Aashish93-stack @johngallardo @Satya-Kolluri @sjiherzig +# ServiceLabel: %Discovery +# PRLabel: %Discovery +/sdk/discovery/ @lawaloy @achocron @sylar217 + # PRLabel: %Document Intelligence /sdk/documentintelligence/ @bojunehsu @yungshinlintw @@ -339,8 +343,6 @@ # PRLabel: %Monitor /sdk/monitor/azure-monitor-ingestion/ @Azure/azure-sdk-write-monitor-data-plane -# PRLabel: %Cognitive - Content Understanding -/sdk/contentunderstanding/ @bojunehsu @changjian-wang @chienyuanchang @yungshinlintw # PRLabel: %Monitor /sdk/monitor/azure-monitor-query/ @Azure/azure-sdk-write-monitor-query-logs diff 
--git a/sdk/discovery/azure-mgmt-discovery/tests/__init__.py b/sdk/discovery/azure-mgmt-discovery/tests/__init__.py new file mode 100644 index 000000000000..b74cfa3b899c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/__init__.py @@ -0,0 +1,4 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ diff --git a/sdk/discovery/azure-mgmt-discovery/tests/conftest.py b/sdk/discovery/azure-mgmt-discovery/tests/conftest.py new file mode 100644 index 000000000000..21acae3a2965 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/conftest.py @@ -0,0 +1,34 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +""" +Pytest configuration for azure-mgmt-discovery tests. + +Management SDK tests run as live-only tests (no recordings needed). +""" +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# For security, please avoid recording sensitive identity information +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + """Add sanitizers to remove sensitive information.""" + subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=client_id, 
value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=client_secret, value="00000000-0000-0000-0000-000000000000") + add_header_regex_sanitizer(key="Authorization", value="Sanitized") diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_create_bookshelf.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_create_bookshelf.json new file mode 100644 index 000000000000..4e796804c5df --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_create_bookshelf.json @@ -0,0 +1,70 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves/test-bookshelf-324938be?api-version=2026-02-01-preview", + "RequestMethod": "PUT", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "23", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": { + "location": "uksouth" + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "652", + "Content-Type": "application/json; charset=utf-8", + "Date": "Mon, 02 Mar 2026 20:10:13 GMT", + "ETag": "\"79033bd8-0000-1000-0000-69a5eea40000\"", + "Expires": "-1", + "mise-correlation-id": "c804aa00-55ce-47cc-95f5-9a7dd1976588", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "61c6eb23-7954-4e7b-b0d7-0d08b7e0d40b", + "x-ms-operation-identifier": 
"tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/679dd090-3515-49e4-97f5-5ea1a65da394", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "800", + "x-ms-routing-request-id": "EASTUS2EUAP:20260302T201013Z:61c6eb23-7954-4e7b-b0d7-0d08b7e0d40b", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 070542FAAA494B55B4E9EC4D9D7B6B43 Ref B: SN4AA2022305031 Ref C: 2026-03-02T20:10:03Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:10:07.3380149Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:10:07.3380149Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-324938be-vh75b0", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-324938be.bookshelf-dev.discovery.azure.com" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_delete_bookshelf.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_delete_bookshelf.json new file mode 100644 index 000000000000..4ff62029e064 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_delete_bookshelf.json @@ -0,0 +1,32 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves/test-bookshelf-9379e896?api-version=2026-02-01-preview", + "RequestMethod": 
"DELETE", + "RequestHeaders": { + "Accept": "*/*", + "Connection": "keep-alive", + "Content-Length": "0", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 204, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Date": "Mon, 02 Mar 2026 20:46:09 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "fb6d510e-bb80-45ce-a5ab-ed9841d7dbf4", + "x-ms-ratelimit-remaining-subscription-deletes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260302T204610Z:fb6d510e-bb80-45ce-a5ab-ed9841d7dbf4", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: F97C127F618E4B0084727E7935FEB279 Ref B: SN4AA2022305031 Ref C: 2026-03-02T20:46:09Z" + }, + "ResponseBody": null + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_get_bookshelf.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_get_bookshelf.json new file mode 100644 index 000000000000..2ab0ca4d669c --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_get_bookshelf.json @@ -0,0 +1,62 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves/test-bookshelf-05fbc43d?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "617", + 
"Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:19 GMT", + "ETag": "\"65016e0f-0000-1100-0000-69a5e2920000\"", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "0dd430d4-b434-4dfa-90df-9b60abf4a803", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165620Z:0dd430d4-b434-4dfa-90df-9b60abf4a803", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 1601489564644BE09FE97C6EB94742BD Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:19Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:01:52.2939662Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:01:52.2939662Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-05fbc43d-w1nf6q", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-05fbc43d.bookshelf-dev.discovery.azure.com" + } + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_resource_group.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_resource_group.json new file mode 100644 index 000000000000..958c0ae7acdc --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_resource_group.json @@ 
-0,0 +1,184 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "3101", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:58:24 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "4d261cc9-dd97-4ec9-91bc-f08cfab70f35", + "x-ms-original-request-ids": [ + "ac4624a0-643a-478a-82b6-83e0652da0c5", + "b8e40bec-f950-450a-bca9-5799832c8e68", + "7b1fde99-c46e-4d8e-a107-b9118d09bfdd", + "7a29777c-4d4b-4083-8ef4-f88028dba681", + "db58c732-b73f-46b1-a06b-b6754775ff0a", + "26da5536-b8bc-440f-bf75-58d621cf1c56" + ], + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165824Z:4d261cc9-dd97-4ec9-91bc-f08cfab70f35", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 077581DCBB3E4E6C91B1438C9544A77F Ref B: SN4AA2022301039 Ref C: 2026-03-03T16:58:23Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:01:52.2939662Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:01:52.2939662Z" + }, + "properties": { + "managedResourceGroup": 
"mrg-dbksf-test-bookshelf-05fbc43d-w1nf6q", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-05fbc43d.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:25:18.1136078Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:25:18.1136078Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-0b008665-8ndxj5", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-0b008665.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:28:46.0772936Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:28:46.0772936Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-df5e8667-f6m0ma", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-df5e8667.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": 
"2026-03-02T19:57:00.0609603Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:57:00.0609603Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-64969832-l9mm7k", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-64969832.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:10:07.3380149Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:10:07.3380149Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-324938be-vh75b0", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-324938be.bookshelf-dev.discovery.azure.com" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_subscription.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_subscription.json new file mode 100644 index 000000000000..05848e1713c9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_subscription.json @@ -0,0 +1,264 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/bookshelves?api-version=2026-02-01-preview", + "RequestMethod": "GET", + 
"RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "4849", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:55:31 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "5f8132ea-29c2-4f34-9b8c-cd7b9d01e43b", + "x-ms-original-request-ids": [ + "3a150c96-ed70-4d39-807f-7bea13e124a8", + "e68050ec-418a-4e8c-8f29-cc839fd05ca8", + "69a857b6-8305-46a5-9378-0a06b110e94b", + "487af5a5-7c30-4f78-b3f6-af5c9ec1cddd", + "a94c0f38-de23-417f-8c01-c0af2c593d1f", + "bc031b8c-7a13-495d-b7d3-fdd32d21a54f" + ], + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165531Z:5f8132ea-29c2-4f34-9b8c-cd7b9d01e43b", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 1763DBA97007446DA37E5CECF2F99B37 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:55:30Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2025-11-04T14:16:04.2743996Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2025-11-04T14:16:04.2743996Z" + }, + "properties": { + "provisioningState": "Succeeded", + "bookshelfUri": "https://itbshlfpsu11.bookshelf-dev.discovery.azure.com", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "managedResourceGroup": "itbshlfpsu11-mrg-pf3i44" + } + }, + { + "id": "Sanitized", + "name": 
"Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2026-01-15T00:20:20.0965772Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-01-15T00:20:20.0965772Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-itbshlfrp114-v9zm2i", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded", + "bookshelfUri": "https://itbshlfrp114.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-23T09:08:18.5211656Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-23T09:08:18.5211656Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-yaoswal-bookshelf-test-nxw403", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded", + "bookshelfUri": "https://yaoswal-bookshelf-test.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:01:52.2939662Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:01:52.2939662Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-05fbc43d-w1nf6q", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": 
"https://test-bookshelf-05fbc43d.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:25:18.1136078Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:25:18.1136078Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-0b008665-8ndxj5", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-0b008665.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:28:46.0772936Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:28:46.0772936Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-df5e8667-f6m0ma", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-df5e8667.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:57:00.0609603Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T19:57:00.0609603Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-64969832-l9mm7k", + 
"managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-64969832.bookshelf-dev.discovery.azure.com" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:10:07.3380149Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:10:07.3380149Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-324938be-vh75b0", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded", + "bookshelfUri": "https://test-bookshelf-324938be.bookshelf-dev.discovery.azure.com" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_update_bookshelf.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_update_bookshelf.json new file mode 100644 index 000000000000..fd7429b87ba1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_update_bookshelf.json @@ -0,0 +1,72 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves/test-bookshelf-05fbc43d?api-version=2026-02-01-preview", + "RequestMethod": "PATCH", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "46", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + 
"RequestBody": { + "tags": { + "SkipAutoDeleteTill": "2026-12-31" + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "652", + "Content-Type": "application/json; charset=utf-8", + "Date": "Thu, 05 Mar 2026 15:23:13 GMT", + "ETag": "\"17015f06-0000-1000-0000-69a99fe10000\"", + "Expires": "-1", + "mise-correlation-id": "4216893a-51d8-46da-9e42-614f457e280a", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "7bc71812-89c2-4531-a9e1-e0d436f95505", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/c33c19cb-fc33-4d80-a73e-6a2ebf88e3aa", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260305T152313Z:7bc71812-89c2-4531-a9e1-e0d436f95505", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: F626F17645054505AA21DA12F32E0E24 Ref B: SN4AA2022303027 Ref C: 2026-03-05T15:23:09Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/bookshelves", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T19:01:52.2939662Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-05T15:23:10.8744515Z" + }, + "properties": { + "managedResourceGroup": "mrg-dbksf-test-bookshelf-05fbc43d-w1nf6q", + "bookshelfUri": "https://test-bookshelf-05fbc43d.bookshelf-dev.discovery.azure.com", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "workloadIdentities": {}, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No 
newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_create_chat_model_deployment.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_create_chat_model_deployment.json new file mode 100644 index 000000000000..e5a98622a580 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_create_chat_model_deployment.json @@ -0,0 +1,60 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01/chatModelDeployments/test-deploy-chatmodel01?api-version=2026-02-01-preview", + "RequestMethod": "PUT", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)", + "Content-Type": "application/json", + "Content-Length": "80" + }, + "RequestBody": { + "location": "uksouth", + "properties": { + "modelFormat": "OpenAI", + "modelName": "gpt-4o" + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Content-Length": "625", + "Content-Type": "application/json; charset=utf-8", + "Expires": "-1", + "ETag": "\"1407f7ee-0000-1000-0000-69a8a1690000\"", + "x-ms-providerhub-traffic": "True", + "x-ms-request-id": "dd68dafc-13fb-41a4-88d6-473d7f5a55aa", + "x-ms-correlation-request-id": "ba5d9da8-d1b1-4372-9221-70a801bbb0c5", + "x-ms-ratelimit-remaining-subscription-writes": "800", + "x-ms-routing-request-id": "EASTUS2EUAP:20260304T211730Z:ba5d9da8-d1b1-4372-9221-70a801bbb0c5", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "Date": "Wed, 04 Mar 2026 
21:17:30 GMT" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces/chatmodeldeployments", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-04T21:17:26.2873892Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T21:17:26.2873892Z" + }, + "properties": { + "modelFormat": "OpenAI", + "modelName": "gpt-4o", + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_delete_chat_model_deployment.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_delete_chat_model_deployment.json new file mode 100644 index 000000000000..cfec2ed54381 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_delete_chat_model_deployment.json @@ -0,0 +1,32 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01/chatModelDeployments/test-deploy-chatmodel01?api-version=2026-02-01-preview", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Accept": "*/*", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)", + "Content-Length": "0" + }, + "RequestBody": null, + "StatusCode": 204, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Expires": "-1", + "x-ms-providerhub-traffic": "True", + "x-ms-request-id": "e4a1b2c3-d4e5-6f7a-8b9c-0d1e2f3a4b5c", + "x-ms-correlation-request-id": "f5a6b7c8-d9e0-1f2a-3b4c-5d6e7f8a9b0c", + "x-ms-ratelimit-remaining-subscription-deletes": "799", + 
"x-ms-routing-request-id": "EASTUS2EUAP:20260304T212000Z:f5a6b7c8-d9e0-1f2a-3b4c-5d6e7f8a9b0c", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "Date": "Wed, 04 Mar 2026 21:20:00 GMT" + }, + "ResponseBody": null + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_get_chat_model_deployment.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_get_chat_model_deployment.json new file mode 100644 index 000000000000..6fa54aa0c108 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_get_chat_model_deployment.json @@ -0,0 +1,52 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01/chatModelDeployments/test-deploy-chatmodel01?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Content-Length": "625", + "Content-Type": "application/json; charset=utf-8", + "Expires": "-1", + "ETag": "\"1407f7ee-0000-1000-0000-69a8a1690000\"", + "x-ms-providerhub-traffic": "True", + "x-ms-request-id": "dd68dafc-13fb-41a4-88d6-473d7f5a55aa", + "x-ms-correlation-request-id": "ba5d9da8-d1b1-4372-9221-70a801bbb0c5", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260304T211830Z:ba5d9da8-d1b1-4372-9221-70a801bbb0c5", + 
"Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "Date": "Wed, 04 Mar 2026 21:18:30 GMT" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces/chatmodeldeployments", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-04T21:17:26.2873892Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T21:17:26.2873892Z" + }, + "properties": { + "modelFormat": "OpenAI", + "modelName": "gpt-4o", + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_list_chat_model_deployments_by_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_list_chat_model_deployments_by_workspace.json new file mode 100644 index 000000000000..48a8e98bacdd --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_list_chat_model_deployments_by_workspace.json @@ -0,0 +1,54 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01/chatModelDeployments?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Content-Type": "application/json; charset=utf-8", + "Expires": "-1", + 
"x-ms-providerhub-traffic": "True", + "x-ms-request-id": "dd68dafc-13fb-41a4-88d6-473d7f5a55aa", + "x-ms-correlation-request-id": "ba5d9da8-d1b1-4372-9221-70a801bbb0c5", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260304T211730Z:ba5d9da8-d1b1-4372-9221-70a801bbb0c5", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "Date": "Wed, 04 Mar 2026 21:17:30 GMT" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces/chatmodeldeployments", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-04T21:17:26.2873892Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T21:17:26.2873892Z" + }, + "properties": { + "modelFormat": "OpenAI", + "modelName": "gpt-4o", + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_node_pools.pyTestNodePoolstest_list_node_pools_by_supercomputer.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_node_pools.pyTestNodePoolstest_list_node_pools_by_supercomputer.json new file mode 100644 index 000000000000..3451ab14c904 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_node_pools.pyTestNodePoolstest_list_node_pools_by_supercomputer.json @@ -0,0 +1,66 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/rp114-rg/providers/Microsoft.Discovery/supercomputers/itsuperp114/nodePools?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 
(Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "660", + "Content-Type": "application/json; charset=utf-8", + "Date": "Mon, 02 Mar 2026 15:40:44 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "bd085dc4-1a43-4236-9291-456d3bfcb2c3", + "x-ms-original-request-ids": [ + "f33a7c16-e0db-4d84-aa70-abc4714ac281", + "4ff34f51-c5f8-47e8-90d6-1a12550356e0", + "64037399-fb5b-4862-8624-f49768a7efb0", + "7bdacbdd-c72e-4870-a182-b5c0587c4769", + "f1a6329f-a25e-4b3d-b480-9a78287e8c57", + "21131c19-ebab-446b-a948-68fe65f132f0" + ], + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260302T154044Z:bd085dc4-1a43-4236-9291-456d3bfcb2c3", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 5263AED01E924F51895C061BE882D02D Ref B: SN4AA2022304047 Ref C: 2026-03-02T15:40:44Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers/nodepools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2026-01-15T00:33:59.5340715Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-01-15T00:33:59.5340715Z" + }, + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-nodepool", + "vmSize": "Standard_D4s_v6", + "maxNodeCount": 3, + "minNodeCount": 1, + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git 
a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_operations.pyTestOperationstest_list_operations.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_operations.pyTestOperationstest_list_operations.json new file mode 100644 index 000000000000..eb2c83e549ca --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_operations.pyTestOperationstest_list_operations.json @@ -0,0 +1,1501 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/providers/Microsoft.Discovery/operations?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "29943", + "Content-Type": "application/json; charset=utf-8", + "Date": "Mon, 02 Mar 2026 15:38:40 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "eb9a66a6-5d8e-4ccc-91c8-2fe6f009614d", + "x-ms-operation-identifier": "", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-tenant-reads": "2199", + "x-ms-routing-request-id": "EASTUS2EUAP:20260302T153841Z:eb9a66a6-5d8e-4ccc-91c8-2fe6f009614d", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: A75CCC15437A42808BE41661E746D215 Ref B: SN4AA2022302037 Ref C: 2026-03-02T15:38:41Z" + }, + "ResponseBody": { + "value": [ + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "Microsoft.Discovery", + "operation": "Register the Microsoft.Discovery", + "description": "Register the subscription for Microsoft.Discovery" + }, + "properties": null + }, + { + "name": "Sanitized", + 
"isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "Microsoft.Discovery", + "operation": "Unregister the Microsoft.Discovery", + "description": "Unregister the subscription for Microsoft.Discovery" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "locations/operationStatuses", + "operation": "read_operationStatuses", + "description": "read operationStatuses" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "locations/operationStatuses", + "operation": "write_operationStatuses", + "description": "write operationStatuses" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "operations", + "operation": "read_operations", + "description": "read operations" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "checkNameAvailability", + "operation": "action_checkNameAvailability", + "description": "action checkNameAvailability" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces", + "operation": "Workspaces_ListBySubscription", + "description": "List Workspace resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces", + "operation": "Workspaces_ListByResourceGroup", + "description": "List Workspace resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces", + "operation": 
"Workspaces_Get", + "description": "Get a Workspace" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces", + "operation": "Workspaces_CreateOrUpdate", + "description": "Create a Workspace" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces", + "operation": "Workspaces_Delete", + "description": "Delete a Workspace" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces", + "operation": "Workspaces_Update", + "description": "Update a Workspace" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "supercomputers", + "operation": "Supercomputers_ListBySubscription", + "description": "List Supercomputer resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "supercomputers", + "operation": "Supercomputers_ListByResourceGroup", + "description": "List Supercomputer resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "supercomputers", + "operation": "Supercomputers_Get", + "description": "Get a Supercomputer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "supercomputers", + "operation": "Supercomputers_CreateOrUpdate", + "description": "Create a Supercomputer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": 
"supercomputers", + "operation": "Supercomputers_Delete", + "description": "Delete a Supercomputer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "supercomputers", + "operation": "Supercomputers_Update", + "description": "Update a Supercomputer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storages", + "operation": "Storages_ListBySubscription", + "description": "List Storage resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storages", + "operation": "Storages_ListByResourceGroup", + "description": "List Storage resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storages", + "operation": "Storages_Get", + "description": "Get a Storage" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storages", + "operation": "Storages_CreateOrUpdate", + "description": "Create a Storage" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storages", + "operation": "Storages_Delete", + "description": "Delete a Storage" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storages", + "operation": "Storages_Update", + "description": "Update a Storage" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "agents", + "operation": 
"Agents_ListBySubscription", + "description": "List Agent resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves", + "operation": "Bookshelves_ListBySubscription", + "description": "List Bookshelf resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataContainers", + "operation": "DataContainers_ListBySubscription", + "description": "List DataContainer resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataAssets", + "operation": "DataAssets_ListBySubscription", + "description": "List DataAsset resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageContainers", + "operation": "StorageContainers_ListBySubscription", + "description": "List StorageContainer resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageAssets", + "operation": "StorageAssets_ListBySubscription", + "description": "List StorageAsset resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "chatModelDeployments", + "operation": "ChatModelDeployments_ListBySubscription", + "description": "List ChatModelDeployment resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "models", + "operation": 
"Models_ListBySubscription", + "description": "List Model resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "nodePools", + "operation": "NodePools_ListBySubscription", + "description": "List NodePool resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "tools", + "operation": "Tools_ListBySubscription", + "description": "List Tool resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workflows", + "operation": "Workflows_ListBySubscription", + "description": "List Workflow resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "projects", + "operation": "Projects_ListBySubscription", + "description": "List Project resources by subscription ID" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "agents", + "operation": "Agents_ListByResourceGroup", + "description": "List Agent resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "agents", + "operation": "Agents_Get", + "description": "Get a Agent" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "agents", + "operation": "Agents_CreateOrUpdate", + "description": "Create a Agent" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": 
"Microsoft.Discovery", + "resource": "agents", + "operation": "Agents_Update", + "description": "Update a Agent" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "agents", + "operation": "Agents_Delete", + "description": "Delete a Agent" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves", + "operation": "Bookshelves_ListByResourceGroup", + "description": "List Bookshelf resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves", + "operation": "Bookshelves_Get", + "description": "Get a Bookshelf" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves", + "operation": "Bookshelves_CreateOrUpdate", + "description": "Create a Bookshelf" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves", + "operation": "Bookshelves_Update", + "description": "Update a Bookshelf" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves", + "operation": "Bookshelves_Delete", + "description": "Delete a Bookshelf" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataContainers", + "operation": "DataContainers_ListByResourceGroup", + "description": "List DataContainer resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + 
"resource": "dataContainers", + "operation": "DataContainers_Get", + "description": "Get a DataContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataContainers", + "operation": "DataContainers_CreateOrUpdate", + "description": "Create a DataContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataContainers", + "operation": "DataContainers_Update", + "description": "Update a DataContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataContainers", + "operation": "DataContainers_Delete", + "description": "Delete a DataContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataAssets", + "operation": "DataAssets_ListByDataContainer", + "description": "List DataAsset resources by DataContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataAssets", + "operation": "DataAssets_Get", + "description": "Get a DataAsset" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataAssets", + "operation": "DataAssets_CreateOrUpdate", + "description": "Create a DataAsset" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "dataAssets", + "operation": "DataAssets_Update", + "description": "Update a DataAsset" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": 
"dataAssets", + "operation": "DataAssets_Delete", + "description": "Delete a DataAsset" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageContainers", + "operation": "StorageContainers_ListByResourceGroup", + "description": "List StorageContainer resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageContainers", + "operation": "StorageContainers_Get", + "description": "Get a StorageContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageContainers", + "operation": "StorageContainers_CreateOrUpdate", + "description": "Create a StorageContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageContainers", + "operation": "StorageContainers_Update", + "description": "Update a StorageContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageContainers", + "operation": "StorageContainers_Delete", + "description": "Delete a StorageContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageAssets", + "operation": "StorageAssets_ListByStorageContainer", + "description": "List StorageAsset resources by StorageContainer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageAssets", + "operation": "StorageAssets_Get", + "description": "Get a StorageAsset" + }, + "properties": null + }, + { + "name": 
"Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageAssets", + "operation": "StorageAssets_CreateOrUpdate", + "description": "Create a StorageAsset" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageAssets", + "operation": "StorageAssets_Update", + "description": "Update a StorageAsset" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storageAssets", + "operation": "StorageAssets_Delete", + "description": "Delete a StorageAsset" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "chatModelDeployments", + "operation": "ChatModelDeployments_ListByWorkspace", + "description": "List ChatModelDeployment resources by Workspace" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "chatModelDeployments", + "operation": "ChatModelDeployments_Get", + "description": "Get a ChatModelDeployment" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "chatModelDeployments", + "operation": "ChatModelDeployments_CreateOrUpdate", + "description": "Create a ChatModelDeployment" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "chatModelDeployments", + "operation": "ChatModelDeployments_Update", + "description": "Update a ChatModelDeployment" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "chatModelDeployments", + "operation": 
"ChatModelDeployments_Delete", + "description": "Delete a ChatModelDeployment" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "models", + "operation": "Models_ListByResourceGroup", + "description": "List Model resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "models", + "operation": "Models_Get", + "description": "Get a Model" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "models", + "operation": "Models_CreateOrUpdate", + "description": "Create a Model" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "models", + "operation": "Models_Update", + "description": "Update a Model" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "models", + "operation": "Models_Delete", + "description": "Delete a Model" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "nodePools", + "operation": "NodePools_ListBySupercomputer", + "description": "List NodePool resources by Supercomputer" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "nodePools", + "operation": "NodePools_Get", + "description": "Get a NodePool" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "nodePools", + "operation": "NodePools_CreateOrUpdate", + "description": "Create a NodePool" + }, + 
"properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "nodePools", + "operation": "NodePools_Update", + "description": "Update a NodePool" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "nodePools", + "operation": "NodePools_Delete", + "description": "Delete a NodePool" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "tools", + "operation": "Tools_ListByResourceGroup", + "description": "List Tool resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "tools", + "operation": "Tools_Get", + "description": "Get a Tool" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "tools", + "operation": "Tools_CreateOrUpdate", + "description": "Create a Tool" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "tools", + "operation": "Tools_Update", + "description": "Update a Tool" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "tools", + "operation": "Tools_Delete", + "description": "Delete a Tool" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workflows", + "operation": "Workflows_ListByResourceGroup", + "description": "List Workflow resources by resource group" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": 
"Microsoft.Discovery", + "resource": "workflows", + "operation": "Workflows_Get", + "description": "Get a Workflow" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workflows", + "operation": "Workflows_CreateOrUpdate", + "description": "Create a Workflow" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workflows", + "operation": "Workflows_Update", + "description": "Update a Workflow" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workflows", + "operation": "Workflows_Delete", + "description": "Delete a Workflow" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "projects", + "operation": "Projects_ListByWorkspace", + "description": "List Project resources by Workspace" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "projects", + "operation": "Projects_Get", + "description": "Get a Project" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "projects", + "operation": "Projects_CreateOrUpdate", + "description": "Create a Project" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "projects", + "operation": "Projects_Update", + "description": "Update a Project" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "projects", + "operation": "Projects_Delete", + "description": "Delete 
a Project" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "tools", + "operation": "Tools_Run", + "description": "Run the specified tool in the context of the specified project." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgebases", + "operation": "KnowledgeBases_ListKnowledgeBases", + "description": "List KnowledgeBase resources" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgebases", + "operation": "KnowledgeBases_GetOperationStatus", + "description": "Get status of a KnowledgeBase LRO (create/delete)." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgeBaseVersions", + "operation": "KnowledgeBaseVersions_GetOperationStatus", + "description": "Get status of a KnowledgeBaseVersion LRO (create/delete)." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgebases", + "operation": "KnowledgeBases_DeleteKnowledgeBase", + "description": "Delete a KnowledgeBase." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgeBaseVersions", + "operation": "KnowledgeBaseVersions_ListKnowledgeBaseVersions", + "description": "List KnowledgeBaseVersion resources" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgeBaseVersions", + "operation": "KnowledgeBaseVersions_DeleteKnowledgeBaseVersion", + "description": "Delete a KnowledgeBaseVersion." 
+ }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgeBaseVersions", + "operation": "KnowledgeBaseVersions_CreateOrUpdateKnowledgeBaseVersion", + "description": "Creates or updates a KnowledgeBaseVersion." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgeBaseVersions", + "operation": "KnowledgeBaseVersions_StartIndexing", + "description": "Start indexing." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgeBaseVersions", + "operation": "KnowledgeBaseVersions_StopIndexing", + "description": "Stop indexing." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "conversations", + "operation": "Conversations_ListConversations", + "description": "List Conversation resources" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "knowledgeBaseVersions", + "operation": "KnowledgeBaseVersions_Search", + "description": "Search the knowledge base." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "conversations", + "operation": "Conversations_CreateConversation", + "description": "Creates a Conversation." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "conversations", + "operation": "Conversations_DeleteConversation", + "description": "Deletes a conversation." 
+ }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "investigations", + "operation": "Investigations_ListInvestigations", + "description": "List Investigation resources" + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "investigations", + "operation": "Investigations_CreateOrUpdateInvestigation", + "description": "Creates or updates Investigation." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "investigations", + "operation": "Investigations_DeleteInvestigation", + "description": "Delete a Investigation." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "supercomputers/nodepools", + "operation": "Tools_Run", + "description": "Run the specified tool on a supercomputers/nodepool resource." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": true, + "display": { + "provider": "Microsoft.Discovery", + "resource": "storages", + "operation": "Storages_Mount", + "description": "Mount a storage resource on a supercomputers/nodepool." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateEndpointConnectionProxies", + "operation": "Bookshelves_GetPrivateEndpointConnectionProxy", + "description": "Get a private endpoint connection proxy on a bookshelf." 
+ }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateEndpointConnectionProxies", + "operation": "Bookshelves_WritePrivateEndpointConnectionProxy", + "description": "Create a private endpoint connection proxy on a bookshelf." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateEndpointConnectionProxies", + "operation": "Bookshelves_DeletePrivateEndpointConnectionProxy", + "description": "Delete a private endpoint connection proxy on a bookshelf." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateEndpointConnectionProxies", + "operation": "Bookshelves_ValidatePrivateEndpointConnectionProxy", + "description": "Validate a private endpoint connection proxy on a bookshelf." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateEndpointConnections", + "operation": "Bookshelves_GetPrivateEndpointConnection", + "description": "Get a private endpoint connection on a bookshelf." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateEndpointConnections", + "operation": "Bookshelves_WritePrivateEndpointConnection", + "description": "Create a private endpoint connection on a bookshelf." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateEndpointConnections", + "operation": "Bookshelves_DeletePrivateEndpointConnection", + "description": "Delete a private endpoint connection on a bookshelf." 
+ }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateLinkResources", + "operation": "Bookshelves_GetPrivateLinkResource", + "description": "Get a private link resource on a bookshelf." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "bookshelves/privateEndpointConnectionsApproval", + "operation": "Bookshelves_PrivateEndpointConnectionsApproval", + "description": "Approve a private endpoint connection on a bookshelf." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateEndpointConnectionProxies", + "operation": "Workspaces_GetPrivateEndpointConnectionProxy", + "description": "Get a private endpoint connection proxy on a workspace." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateEndpointConnectionProxies", + "operation": "Workspaces_WritePrivateEndpointConnectionProxy", + "description": "Create a private endpoint connection proxy on a workspace." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateEndpointConnectionProxies", + "operation": "Workspaces_DeletePrivateEndpointConnectionProxy", + "description": "Delete a private endpoint connection proxy on a workspace." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateEndpointConnectionProxies", + "operation": "Workspaces_ValidatePrivateEndpointConnectionProxy", + "description": "Validate a private endpoint connection proxy on a workspace." 
+ }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateEndpointConnections", + "operation": "Workspaces_GetPrivateEndpointConnection", + "description": "Get a private endpoint connection on a workspace." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateEndpointConnections", + "operation": "Workspaces_WritePrivateEndpointConnection", + "description": "Create a private endpoint connection on a workspace." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateEndpointConnections", + "operation": "Workspaces_DeletePrivateEndpointConnection", + "description": "Delete a private endpoint connection on a workspace." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateLinkResources", + "operation": "Workspaces_GetPrivateLinkResource", + "description": "Get a private link resource on a workspace." + }, + "properties": null + }, + { + "name": "Sanitized", + "isDataAction": false, + "display": { + "provider": "Microsoft.Discovery", + "resource": "workspaces/privateEndpointConnectionsApproval", + "operation": "Workspaces_PrivateEndpointConnectionsApproval", + "description": "Approve a private endpoint connection on a workspace." 
+ }, + "properties": null + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_projects.pyTestProjectstest_list_projects_by_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_projects.pyTestProjectstest_list_projects_by_workspace.json new file mode 100644 index 000000000000..79ff65c7efc2 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_projects.pyTestProjectstest_list_projects_by_workspace.json @@ -0,0 +1,37 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/newapiversiontest/providers/Microsoft.Discovery/workspaces/wrksptest44/projects?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "12", + "Content-Type": "application/json; charset=utf-8", + "Date": "Wed, 04 Feb 2026 15:48:27 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "421c8562-02dc-41b2-8bcc-322cc8e5a2ca", + "x-ms-original-request-ids": "9166d87a-188d-42a9-933f-c49f0df916f4", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260204T154827Z:421c8562-02dc-41b2-8bcc-322cc8e5a2ca", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: E6307FB1B53D4C72B57E51521239D7B9 Ref B: SN4AA2022302025 Ref C: 2026-02-04T15:48:26Z" + }, + "ResponseBody": { + "value": [] + } + } + ], + "Variables": {} +} diff --git 
a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_create_storage_asset.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_create_storage_asset.json new file mode 100644 index 000000000000..ce3e636c5c3e --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_create_storage_asset.json @@ -0,0 +1,66 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets/test-sa-482ad005?api-version=2026-02-01-preview", + "RequestMethod": "PUT", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "123", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": { + "location": "uksouth", + "properties": { + "description": "Test storage asset for SDK validation", + "path": "data/test-assets" + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "495", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:01:02 GMT", + "ETag": "\"4b00ed4c-0000-1000-0000-69a705be0000\"", + "Expires": "-1", + "mise-correlation-id": "fbf0ad2c-ff68-428d-be23-20542cd9901d", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "05c0319e-9cfa-411b-8fce-38da30a7bfa4", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/b5bfbdbd-e30b-48a9-805b-701197b4ce9b", + 
"x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "800", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T160102Z:05c0319e-9cfa-411b-8fce-38da30a7bfa4", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: F2D3F321CE4940B7B653F68DB46AEEBE Ref B: SN4AA2022304021 Ref C: 2026-03-03T16:00:55Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/storagecontainers/storageassets", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-03T16:00:59.1416626Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-03T16:00:59.1416626Z" + }, + "properties": { + "description": "Test storage asset for SDK validation", + "path": "data/test-assets", + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_delete_storage_asset.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_delete_storage_asset.json new file mode 100644 index 000000000000..8f6625216d5b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_delete_storage_asset.json @@ -0,0 +1,38 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets/test-sa-482ad005?api-version=2026-02-01-preview", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Accept": "*/*", + "Connection": "keep-alive", + "Content-Length": "0", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 204, + 
"ResponseHeaders": { + "Cache-Control": "no-cache", + "Date": "Thu, 05 Mar 2026 15:34:19 GMT", + "ETag": "\"3501b5f8-0000-1000-0000-69a9a27a0000\"", + "Expires": "-1", + "mise-correlation-id": "b0c6342d-bf79-446f-894d-ac3b4806be64", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "b8d9d8d0-ae99-4b15-8cfe-50ca2812ae4b", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/9f03118b-de92-4c9a-b2cf-d7584f292739", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-deletes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260305T153419Z:b8d9d8d0-ae99-4b15-8cfe-50ca2812ae4b", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: C83969BB50A74F2880C72E7ED8B0F5B7 Ref B: SN4AA2022305039 Ref C: 2026-03-05T15:34:15Z" + }, + "ResponseBody": null + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_get_storage_asset.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_get_storage_asset.json new file mode 100644 index 000000000000..1e1012da8028 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_get_storage_asset.json @@ -0,0 +1,54 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets/test-sa-482ad005?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": 
"azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "466", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:57:22 GMT", + "ETag": "\"23005205-0000-1100-0000-69a706050000\"", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "c4989ccd-5e10-47f9-8f7b-992c408bd003", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165723Z:c4989ccd-5e10-47f9-8f7b-992c408bd003", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: C0CD5C9470934E908BAFBE47532CB194 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:57:22Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/storagecontainers/storageassets", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-03T16:00:59.1416626Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-03T16:00:59.1416626Z" + }, + "properties": { + "description": "Test storage asset for SDK validation", + "path": "data/test-assets", + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_list_storage_assets_by_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_list_storage_assets_by_storage_container.json new file mode 100644 index 000000000000..ce5687b8d5ec --- /dev/null +++ 
b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_list_storage_assets_by_storage_container.json @@ -0,0 +1,64 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "478", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:57:13 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "38247173-cf4f-4f5a-afa0-f852a86405fc", + "x-ms-original-request-ids": [ + "1d647d30-33b9-439b-a2f0-2d58b7ac8faa", + "d1345bdb-60e4-46f5-815f-771fd96775b0", + "3caf5592-135b-49ad-8d34-e6a2688e69db", + "12fc2d33-a863-4941-8b59-c180e83b533b", + "c1debb6a-c718-42d7-9cb8-8672380d56e0", + "d366118e-57c6-4362-9e49-5c691c1ab793" + ], + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165714Z:38247173-cf4f-4f5a-afa0-f852a86405fc", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 9B840026E4E3499CB21D8C0C80CD856B Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:57:13Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/storagecontainers/storageassets", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": 
"2026-03-03T16:00:59.1416626Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-03T16:00:59.1416626Z" + }, + "properties": { + "description": "Test storage asset for SDK validation", + "path": "data/test-assets", + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_update_storage_asset.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_update_storage_asset.json new file mode 100644 index 000000000000..285f99d81c8a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_update_storage_asset.json @@ -0,0 +1,66 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets/test-sa-482ad005?api-version=2026-02-01-preview", + "RequestMethod": "PATCH", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "46", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": { + "tags": { + "SkipAutoDeleteTill": "2026-12-31" + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "529", + "Content-Type": "application/json; charset=utf-8", + "Date": "Thu, 05 Mar 2026 15:24:42 GMT", + "ETag": "\"34010cee-0000-1000-0000-69a9a03a0000\"", + "Expires": "-1", + "mise-correlation-id": "20278466-5aac-4316-8254-681410f23aa2", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": 
"1", + "x-ms-correlation-request-id": "1ffbcbfa-2b29-421e-9bf1-f6d7e8fc2d3f", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/63f61a44-0d5f-47de-b453-09118032ee47", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260305T152442Z:1ffbcbfa-2b29-421e-9bf1-f6d7e8fc2d3f", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 06A1444688884787918F4527029B6586 Ref B: SN4AA2022303027 Ref C: 2026-03-05T15:24:39Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/storagecontainers/storageassets", + "location": "uksouth", + "tags": { + "SkipAutoDeleteTill": "2026-12-31" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-03T16:00:59.1416626Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-05T15:24:40.3080887Z" + }, + "properties": { + "description": "Test storage asset for SDK validation", + "path": "data/test-assets", + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_create_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_create_storage_container.json new file mode 100644 index 000000000000..1ac232150ee1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_create_storage_container.json @@ -0,0 +1,70 @@ +{ + "Entries": [ + { + "RequestUri": 
"https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a?api-version=2026-02-01-preview", + "RequestMethod": "PUT", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "235", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": { + "location": "uksouth", + "properties": { + "storageStore": { + "kind": "AzureStorageBlob", + "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr" + } + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "593", + "Content-Type": "application/json; charset=utf-8", + "Date": "Mon, 02 Mar 2026 20:45:52 GMT", + "ETag": "\"9101ee8a-0000-1000-0000-69a5f7000000\"", + "Expires": "-1", + "mise-correlation-id": "b83707e3-b44e-4ce3-b628-47b7c5fc7b8b", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "a97e7fdb-968f-4c4a-8988-5373a62a8cb5", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/7affb2b8-0fa0-4fcd-9228-6f4380dc12d9", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260302T204553Z:a97e7fdb-968f-4c4a-8988-5373a62a8cb5", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 16838EA751A546F7BF37A5F6CA3A82AB Ref B: SN4AA2022305031 Ref C: 2026-03-02T20:45:47Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": 
"microsoft.discovery/storagecontainers", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:45:51.2603686Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:45:51.2603686Z" + }, + "properties": { + "storageStore": { + "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", + "kind": "AzureStorageBlob" + }, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_delete_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_delete_storage_container.json new file mode 100644 index 000000000000..97d788d2d519 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_delete_storage_container.json @@ -0,0 +1,32 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a?api-version=2026-02-01-preview", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Accept": "*/*", + "Connection": "keep-alive", + "Content-Length": "0", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 204, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Date": "Thu, 05 Mar 2026 16:16:36 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": 
"6aa0ee30-075f-4371-bcbe-fc7024a876b3", + "x-ms-ratelimit-remaining-subscription-deletes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260305T161637Z:6aa0ee30-075f-4371-bcbe-fc7024a876b3", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 56E0240202D740298DE1A112D472CA14 Ref B: SN4AA2022302047 Ref C: 2026-03-05T16:16:36Z" + }, + "ResponseBody": null + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_get_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_get_storage_container.json new file mode 100644 index 000000000000..a924dba9f86b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_get_storage_container.json @@ -0,0 +1,56 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "563", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:49 GMT", + "ETag": "\"00005e1a-0000-1100-0000-69a5f73d0000\"", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "5e8c3116-f76d-4cac-8dac-bf5e8272b7ec", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + 
"x-ms-routing-request-id": "EASTUS2EUAP:20260303T165650Z:5e8c3116-f76d-4cac-8dac-bf5e8272b7ec", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 7E4F06471CD7419BB8D9DFD12883ABE1 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:49Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/storagecontainers", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:45:51.2603686Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:45:51.2603686Z" + }, + "properties": { + "storageStore": { + "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", + "kind": "AzureStorageBlob" + }, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_resource_group.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_resource_group.json new file mode 100644 index 000000000000..b25187edf727 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_resource_group.json @@ -0,0 +1,66 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + 
"ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "575", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:39 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "f96ec5fc-d361-42a0-bb50-accab8495cfb", + "x-ms-original-request-ids": [ + "b0589582-e88a-43e9-ad13-7a12bf8e251b", + "bb0f09fb-174b-4d75-a2b4-a9048503dd80", + "d9822582-d6de-475e-aa18-e7a9a7bfcbdd", + "23f70422-6ec4-467b-8c6b-00a3b54ed23b", + "19068a6f-a31c-4396-b8b2-ccda25a6b2d7", + "1d518613-bcef-4957-bb52-ec4f7ec84ff8" + ], + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165640Z:f96ec5fc-d361-42a0-bb50-accab8495cfb", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 1E1FC7A1B1BE43BE9359DA1BFD41998C Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:39Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/storagecontainers", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:45:51.2603686Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:45:51.2603686Z" + }, + "properties": { + "storageStore": { + "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", + "kind": "AzureStorageBlob" + }, + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_subscription.json 
b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_subscription.json new file mode 100644 index 000000000000..a7faed18513d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_subscription.json @@ -0,0 +1,85 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/storageContainers?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "1070", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:44 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "1d3b7639-709d-410f-a99e-347bfe6e2520", + "x-ms-original-request-ids": [ + "128a456b-21d4-480d-98ba-faf629e50062", + "02a69c46-a2bb-41dd-a69f-78b51e06c6b6", + "342bb941-e37d-4b38-b604-ad4758ef1a49", + "3df070ce-6ec2-4c6c-9b01-fc9121d9776a", + "d3c3125f-f407-4c4d-9f09-b676a2f8bba2", + "9675f956-e7b1-42d8-9e1f-2fcf6df9e339" + ], + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165645Z:1d3b7639-709d-410f-a99e-347bfe6e2520", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 46B242B0E81942C498579F3A2309AF54 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:44Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": 
"microsoft.discovery/storagecontainers", + "location": "uksouth", + "tags": {}, + "systemData": { + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-03-02T20:56:51.8425901Z" + }, + "properties": { + "storageStore": { + "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/deray-private-test/providers/Microsoft.Storage/storageAccounts/derayprstg", + "kind": "AzureStorageBlob" + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/storagecontainers", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:45:51.2603686Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:45:51.2603686Z" + }, + "properties": { + "storageStore": { + "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", + "kind": "AzureStorageBlob" + }, + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_update_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_update_storage_container.json new file mode 100644 index 000000000000..8b6317328228 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_update_storage_container.json @@ -0,0 +1,68 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a?api-version=2026-02-01-preview", + "RequestMethod": "PATCH", + "RequestHeaders": { 
+ "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "46", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": { + "tags": { + "SkipAutoDeleteTill": "2026-12-31" + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "627", + "Content-Type": "application/json; charset=utf-8", + "Date": "Thu, 05 Mar 2026 15:32:33 GMT", + "ETag": "\"02003231-0000-1000-0000-69a9a2110000\"", + "Expires": "-1", + "mise-correlation-id": "a5e9029e-b294-4d20-94b5-94bab37c28e2", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "7de6c9bb-1ac1-45e3-86dd-57473dc24f56", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/58fb1db6-4957-4392-a237-0d7fec9758b7", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260305T153234Z:7de6c9bb-1ac1-45e3-86dd-57473dc24f56", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 6B12E9230F404F4ABABF5B16A8BE3566 Ref B: SN4AA2022304051 Ref C: 2026-03-05T15:32:31Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/storagecontainers", + "location": "uksouth", + "tags": { + "SkipAutoDeleteTill": "2026-12-31" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:45:51.2603686Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-05T15:32:32.3368841Z" + }, + "properties": { + "storageStore": { + "storageAccountId": 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", + "kind": "AzureStorageBlob" + }, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_create_supercomputer.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_create_supercomputer.json new file mode 100644 index 000000000000..f23d2ddd74da --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_create_supercomputer.json @@ -0,0 +1,100 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/supercomputers/test-sc-2bbb25b8?api-version=2026-02-01-preview", + "RequestMethod": "PUT", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "453", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": { + "location": "uksouth", + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "identities": { + "clusterIdentity": { + "id": "Sanitized" + }, + "kubeletIdentity": { + "id": "Sanitized" + }, + "workloadIdentities": { + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": {} + } + } + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "1243", + "Content-Type": "application/json; charset=utf-8", + "Date": "Mon, 02 Mar 2026 20:26:41 GMT", + 
"ETag": "\"e101d9d9-0000-1000-0000-69a5f2820000\"", + "Expires": "-1", + "mise-correlation-id": "4f7d7d75-d760-4ec1-b640-15b0fbcf3b07", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "b1a9b1bb-311b-40b3-aed7-78037ba0d1ad", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/e2195b76-5489-4fe6-a942-8ed47002bea9", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260302T202642Z:b1a9b1bb-311b-40b3-aed7-78037ba0d1ad", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 6C2AD6EA55DE440EA450F96E026D12CE Ref B: SN4AA2022305031 Ref C: 2026-03-02T20:26:35Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:26:38.4028447Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:26:38.4028447Z" + }, + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "identities": { + "clusterIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "kubeletIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "workloadIdentities": { + 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": { + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + } + } + }, + "systemSku": "Standard_D4s_v6", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_delete_supercomputer.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_delete_supercomputer.json new file mode 100644 index 000000000000..c642ecddca09 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_delete_supercomputer.json @@ -0,0 +1,38 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/supercomputers/test-sc-2bbb25b8?api-version=2026-02-01-preview", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Accept": "*/*", + "Connection": "keep-alive", + "Content-Length": "0", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 204, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Date": "Thu, 05 Mar 2026 16:04:52 GMT", + "ETag": "\"3e0096f1-0000-1000-0000-69a9a9a40000\"", + "Expires": "-1", + "mise-correlation-id": "c7152a84-2565-418f-8a89-fe8f6008a10e", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": 
"182e92e9-49a4-4087-ae35-b131b971ee7a", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/d42a3ac5-0953-407c-912d-d31b077ba1e3", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-deletes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260305T160453Z:182e92e9-49a4-4087-ae35-b131b971ee7a", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 28FDC45C02654AF386BE3B1358304C4A Ref B: SN4AA2022302047 Ref C: 2026-03-05T16:04:50Z" + }, + "ResponseBody": null + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_get_supercomputer.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_get_supercomputer.json new file mode 100644 index 000000000000..3a1520d6ac1d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_get_supercomputer.json @@ -0,0 +1,78 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/supercomputers/test-sc-2bbb25b8?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "1198", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:34 GMT", + "ETag": "\"f80198c3-0000-1000-0000-69a61bb00000\"", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + 
"X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "00bb6c3e-6d7f-44d5-bef2-eb9500417db4", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165635Z:00bb6c3e-6d7f-44d5-bef2-eb9500417db4", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 1243CEAFB57F4CD4B783D528FA0097C2 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:34Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:26:38.4028447Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-03-02T23:22:24.8691796Z" + }, + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "identities": { + "clusterIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "kubeletIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "workloadIdentities": { + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": { + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + } + } + }, + "systemSku": "Standard_D4s_v6", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} diff --git 
a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_resource_group.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_resource_group.json new file mode 100644 index 000000000000..3f4d62f7938d --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_resource_group.json @@ -0,0 +1,88 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/supercomputers?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "1210", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:25 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "86ed8968-dd64-46a4-80d6-7af7a10d76b6", + "x-ms-original-request-ids": [ + "18f7d244-a82f-45e2-aebe-c34308c2aa3b", + "33b0a420-8c88-4600-8794-330571b2fe8b", + "34317fa1-86b7-44c2-924c-e676d5065983", + "602166b4-ebdb-4ee3-8313-ad710669ba36", + "207ed322-ff3a-4d35-97d1-14b87829966e", + "ae5581c7-b238-4efc-8ae1-db1e76740a11" + ], + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165626Z:86ed8968-dd64-46a4-80d6-7af7a10d76b6", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 85DB5D24A851438DB933E355650980E0 Ref B: DM2AA1091212011 Ref 
C: 2026-03-03T16:56:25Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:26:38.4028447Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-03-02T23:22:24.8691796Z" + }, + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "identities": { + "clusterIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "kubeletIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "workloadIdentities": { + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": { + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + } + } + }, + "systemSku": "Standard_D4s_v6", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_subscription.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_subscription.json new file mode 100644 index 000000000000..669bf09dabf2 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_subscription.json @@ -0,0 +1,268 @@ +{ + "Entries": 
[ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/supercomputers?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "6214", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:30 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "0d5fe431-a3be-4211-b90f-a28a9b4ecffe", + "x-ms-original-request-ids": [ + "9dcdcbe5-1551-4c9d-86f6-35ef8fd9f25a", + "78ac76bb-9727-4bef-ba20-606c17274eae", + "0117e727-04ef-438c-bf02-cfeda583b7d3", + "f83716ec-8acb-4279-8cc4-e6d1d131f2c1", + "5d21fb26-951c-413c-b380-d28ebe5914fc", + "329a4075-e041-4d8e-94d2-b2d7dfbec5b6" + ], + "x-ms-ratelimit-remaining-subscription-reads": "1098", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165631Z:0d5fe431-a3be-4211-b90f-a28a9b4ecffe", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: D2A5DAC3072943CAB3E8220C9816B26E Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:31Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers", + "location": "uksouth", + "tags": { + "createdBy": "Sanitized" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2025-11-18T14:01:46.9205594Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-19T13:40:29.0395037Z" + }, + "properties": { + "subnetId": 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-aks", + "identities": { + "clusterIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "kubeletIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "workloadIdentities": { + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/fixedrg-dev-uksouth1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dev-uksouth1-uami": { + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + } + } + }, + "systemSku": "Standard_D4s_v5", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Failed" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers", + "location": "uksouth", + "tags": { + "createdBy": "Sanitized" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2026-01-15T00:18:20.4616551Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-01-15T00:34:40.7398302Z" + }, + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-aks", + "identities": { + "clusterIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "kubeletIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + 
"workloadIdentities": { + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/fixedrg-dev-uksouth1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dev-uksouth1-uami": { + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + } + } + }, + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-19T14:27:41.4552807Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-19T16:36:06.8831442Z" + }, + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-aks", + "identities": { + "clusterIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "kubeletIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "workloadIdentities": { + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/fixedrg-dev-uksouth1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dev-uksouth1-uami": { + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + } + } + }, + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Failed" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers", + "location": "uksouth", + "systemData": { + "createdBy": 
"Sanitized", + "createdByType": "User", + "createdAt": "2026-02-19T14:28:47.9432207Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-19T16:23:40.0680035Z" + }, + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-aks", + "identities": { + "clusterIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "kubeletIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "workloadIdentities": { + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/fixedrg-dev-uksouth1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dev-uksouth1-uami": { + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + } + } + }, + "systemSku": "Standard_B4ms", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Failed" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/supercomputers", + "location": "uksouth", + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:26:38.4028447Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-03-02T23:22:24.8691796Z" + }, + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "identities": { + "clusterIdentity": { + "id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "kubeletIdentity": { + 
"id": "Sanitized", + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + }, + "workloadIdentities": { + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": { + "principalId": "00000000-0000-0000-0000-000000000000", + "clientId": "00000000-0000-0000-0000-000000000000" + } + } + }, + "systemSku": "Standard_D4s_v6", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_create_tool.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_create_tool.json new file mode 100644 index 000000000000..f09dc440833e --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_create_tool.json @@ -0,0 +1,175 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools/test-tool-50d87c62?api-version=2026-02-01-preview", + "RequestMethod": "PUT", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "959", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": { + "location": "uksouth", + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + 
"compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings.", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + } + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "1399", + "Content-Type": "application/json; charset=utf-8", + "Date": "Mon, 02 Mar 2026 21:02:34 GMT", + "ETag": "\"5202f689-0000-1000-0000-69a5faea0000\"", + "Expires": "-1", + "mise-correlation-id": "df95acd4-2805-47d7-935e-f90b98a20e12", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "6c02fc28-d2f7-42a0-9d7f-75e2865ca4ae", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/129656c1-1656-49c7-a842-a7dbeb530c5f", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260302T210235Z:6c02fc28-d2f7-42a0-9d7f-75e2865ca4ae", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 9C4A47B80DDC43CBA5F21AC72A0BAFBC Ref B: SN4AA2022302033 Ref C: 2026-03-02T21:02:29Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": 
"microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T21:02:33.801961Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T21:02:33.801961Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings.", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_delete_tool.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_delete_tool.json new file mode 100644 index 000000000000..c7acd113ab93 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_delete_tool.json @@ -0,0 +1,38 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools/test-tool-50d87c62?api-version=2026-02-01-preview", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Accept": "*/*", + "Connection": "keep-alive", + "Content-Length": "0", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 204, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Date": "Thu, 05 Mar 2026 16:25:45 GMT", + "ETag": "\"57005f4d-0000-1000-0000-69a9ae8a0000\"", + "Expires": "-1", + "mise-correlation-id": "658a0cb8-2437-4086-b630-88d78341fe20", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "2578b064-b489-4a83-bf36-8ad37814c1c6", + "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/45996932-0745-44bc-99d5-292a2f7de8c5", + "x-ms-providerhub-traffic": "True", + 
"x-ms-ratelimit-remaining-subscription-deletes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260305T162546Z:2578b064-b489-4a83-bf36-8ad37814c1c6", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 2F889D57B7CF4F6F9161EEBE4786734A Ref B: SN4AA2022302047 Ref C: 2026-03-05T16:25:43Z" + }, + "ResponseBody": null + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_get_tool.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_get_tool.json new file mode 100644 index 000000000000..f5b87812fbf1 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_get_tool.json @@ -0,0 +1,109 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools/test-tool-50d87c62?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "1306", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:57:05 GMT", + "ETag": "\"b6006ee1-0000-1100-0000-69a5fb340000\"", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "67238903-bbd5-459d-a4e1-9449565cf851", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165706Z:67238903-bbd5-459d-a4e1-9449565cf851", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": 
"Ref A: 982886DE539D41C0AB2851506AEDD7B5 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:57:05Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T21:02:33.801961Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T21:02:33.801961Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings.", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_resource_group.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_resource_group.json new file mode 100644 index 000000000000..3b3abe5414e7 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_resource_group.json @@ -0,0 +1,119 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "1318", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:59 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "541d9c0a-94d2-4844-a123-fb5b0e0c8666", + "x-ms-original-request-ids": [ + "d41b656b-4cb2-42e6-a1e9-2ff9d5e481bd", + "e16a30c3-93f0-4ea4-8c00-083db7bc4f48", + "69ee1886-677c-4c20-9732-964b577cfc99", + "bdcc1e83-f278-4236-aa41-b726b3bd6ba0", + "923061dc-ba8c-4eed-9065-70a54f7d2352", + "2fdfc08b-6123-41f3-a070-8f60fedbf59e" + ], + 
"x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165700Z:541d9c0a-94d2-4844-a123-fb5b0e0c8666", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 9D5B89D7B96544C797A3CB22F206348C Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:59Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T21:02:33.801961Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T21:02:33.801961Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings.", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_subscription.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_subscription.json new file mode 100644 index 000000000000..1949b5c8b5e9 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_subscription.json @@ -0,0 +1,2057 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/tools?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "44417", + "Content-Type": "application/json; charset=utf-8", + "Date": "Tue, 03 Mar 2026 16:56:53 GMT", + "Expires": "-1", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "ba5c423f-1bea-443b-80d8-fdd88ef15ecc", + "x-ms-original-request-ids": [ + "998e5771-29cf-416c-b142-1107d9b060c8", + "e638e0b5-9012-4b7c-b6fc-0280a449588b", + "0a891fd2-5bda-4025-b342-e6559e275da9", + "b337c21e-20cd-4c86-8234-b867ee2d78ca", + "4e3b42cd-b3c4-4893-b469-709b992130fc", + "3cd9edb1-c696-4314-b0df-27a5ce829793" + ], + "x-ms-ratelimit-remaining-subscription-reads": 
"1098", + "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165654Z:ba5c423f-1bea-443b-80d8-fdd88ef15ecc", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 89580D6FD89E4356A110CA6E19D8E3D9 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:54Z" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2025-09-16T11:09:11.8986542Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2025-09-16T11:18:07.8437828Z" + }, + "properties": { + "environmentVariables": { + "LOGP_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-logp", + "BOILING_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-bp", + "CRITICAL_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-tc", + "DENSITY_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-density", + "SOLUBILITY_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-solubility" + }, + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": 
"1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\"\".", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2026-01-15T00:40:15.3174601Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-01-15T00:49:16.0537055Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\"\".", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": { + "LOGP_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-logp", + "BOILING_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-bp", + "CRITICAL_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-tc", + "DENSITY_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-density", + "SOLUBILITY_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-solubility" + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2026-01-15T00:40:39.3470173Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-01-15T00:48:18.2325096Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This is the coding tool for CorePython cheminformatics operations.", + "version": "1.0.0", + "category": 
"cheminformatics", + "license": "MIT", + "infra": [ + { + "infra_type": "container", + "name": "Sanitized", + "image": { + "acr": "demodiscoveryacr.azurecr.io/corepython:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "2Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "4Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "code_environments": [ + { + "language": "python", + "command": "python3 \"/{{ scriptName }}\"", + "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2026-01-15T00:41:02.0050284Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-01-15T00:48:19.5365998Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This tool is used to test inline files and output mount configurations.", + "version": "1.0.0", + "category": "testing", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/testiotool:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + 
"ram": "2Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Test action to validate inline file input and output mount functionality.", + "input_schema": { + "type": "object", + "properties": { + "test_inline_file": { + "type": "string", + "description": "This is a test inline file, you should provide some dummy text" + } + }, + "required": [ + "test_inline_file" + ] + }, + "command": "python test_io_script.py", + "inline_files": [ + { + "mount_path": "/app/input/test_file.txt", + "content": "{{{ test_inline_file }}}" + } + ], + "output_mount_configurations": [ + { + "mount_path": "/app/outputs/", + "auto_promote": true, + "output_name": "testiooutput", + "output_description": "Test output files generated from processing the inline input file" + } + ], + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-12T20:02:50.7457408Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-02-12T20:03:17.4513262Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + 
"Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\".", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-12T20:02:50.8238918Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-02-12T20:03:53.4456326Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This is the coding tool for CorePython cheminformatics operations.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "infra_type": "container", + "name": "Sanitized", + "image": { + "acr": "demodiscoveryacr.azurecr.io/corepython:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "2Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "4Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "code_environments": [ + { + "language": 
"python", + "command": "python3 \"/{{ scriptName }}\"", + "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-13T17:13:37.8488789Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-02-13T17:14:12.587767Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This is the coding tool for CorePython cheminformatics operations.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "infra_type": "container", + "name": "Sanitized", + "image": { + "acr": "demodiscoveryacr.azurecr.io/corepython:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "2Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "4Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "code_environments": [ + { + "language": "python", + "command": "python3 \"/{{ scriptName }}\"", + "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. 
DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-13T17:13:38.3175742Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-02-13T17:13:58.3227152Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\".", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-14T01:12:40.1293186Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-02-14T01:13:00.4649916Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This is the coding tool for CorePython cheminformatics operations.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "infra_type": "container", + "name": "Sanitized", + "image": { + "acr": "demodiscoveryacr.azurecr.io/corepython:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "2Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "4Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "code_environments": [ + { + "language": "python", + "command": "python3 \"/{{ scriptName }}\"", + "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. 
Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-14T01:12:40.4886867Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-02-14T01:13:14.7006469Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\".", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-17T13:36:57.2052466Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-17T13:36:57.2052466Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "MOO (Multi-Parameter Optimization) Tool provides antibody engineering capabilities for sequence validation, developability prediction, liability analysis, and optimization. It validates antibody sequences (VHH and conventional IgG), predicts developability metrics like expression and stability, analyzes manufacturability liabilities, and performs multi-parameter optimization to generate improved variants. Results include detailed scores, mutations, and recommendations for antibody design.", + "version": "1.0.0", + "category": "Scientific Computing", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "acrantibodydev.azurecr.io/moo-client-csv:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "4Gi", + "storage": "8Gi", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "8Gi", + "storage": "32Gi", + "gpu": "0" + }, + "infiniband": false, + "recommended_sku": [ + "Standard_D4s_v3" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Validate antibody sequences and return parsed data. 
Supports both VHH (single domain) and conventional IgG (heavy+light chain) formats. Returns validation status and parsed sequences with sequence lengths.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string", + "description": "(Optional) Path to a CSV file inside the container to build sequences from (e.g., /app/inputs/sequences.csv)." + }, + "csvLimit": { + "type": "integer", + "description": "(Optional) Only process the first N rows of the CSV." + }, + "outputDir": { + "type": "string", + "description": "(Optional) Output directory inside the container for annotated CSV outputs (default: ./output)." + }, + "outputFile": { + "type": "string", + "description": "(Optional) Output filename (or absolute path) for annotated CSV outputs." + }, + "sequenceName": { + "type": "string", + "description": "Name for the sequence (e.g., \"my-vhh\", \"my-igg\")." + }, + "format": { + "type": "string", + "enum": [ + "vhh", + "conventional" + ], + "description": "Sequence format: 'vhh' for single domain antibodies or 'conventional' for IgG with heavy and light chains." + }, + "vhhSequence": { + "type": "string", + "description": "VHH sequence string (required when format is 'vhh')." + }, + "heavyChain": { + "type": "string", + "description": "Heavy chain sequence (required when format is 'conventional')." + }, + "lightChain": { + "type": "string", + "description": "Light chain sequence (required when format is 'conventional')." 
+ } + }, + "required": [ + "format" + ] + }, + "command": "python main.py -a validate{{#if sequenceName}} -n '{{sequenceName}}'{{/if}} -f '{{format}}'{{#if csvFile}} --csv-file '{{csvFile}}'{{/if}}{{#if csvLimit}} --csv-limit {{csvLimit}}{{/if}}{{#if outputDir}} --output-dir '{{outputDir}}'{{/if}}{{#if outputFile}} --output-file '{{outputFile}}'{{/if}}{{#if vhhSequence}} --vhh-sequence '{{vhhSequence}}'{{/if}}{{#if heavyChain}} --heavy-chain '{{heavyChain}}'{{/if}}{{#if lightChain}} --light-chain '{{lightChain}}'{{/if}}", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Predict sequence developability metrics including expression, HIC retention, ECM, and BV.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string", + "description": "Path to a CSV file inside the container to build sequences from." + }, + "format": { + "type": "string", + "description": "Sequence format: 'vhh' or 'conventional'." + } + }, + "required": [ + "format", + "csvFile" + ] + }, + "command": "python main.py -a developability -f '{{format}}' --csv-file '{{csvFile}}'", + "infra_node": "worker", + "output_mount_configurations": [ + { + "mount_path": "/app/outputs/", + "auto_promote": true, + "output_name": "DevelopabilityResults", + "output_description": "The results of developability predictions including expression, HIC retention, ECM, and BV metrics." 
+ } + ] + }, + { + "name": "Sanitized", + "description": "Analyze sequences for potential manufacturability liabilities such as oxidation sites, aggregation risks, and structural issues.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string" + }, + "csvLimit": { + "type": "integer" + }, + "outputDir": { + "type": "string" + }, + "outputFile": { + "type": "string" + }, + "liabilitiesSeqColumn": { + "type": "string" + }, + "sequence": { + "type": "string" + }, + "chain": { + "type": "string", + "enum": [ + "vhh", + "heavy", + "light" + ] + }, + "includeStructural": { + "type": "boolean" + }, + "scheme": { + "type": "string" + }, + "filterGermline": { + "type": "boolean" + }, + "regionScope": { + "type": "string", + "enum": [ + "ALL", + "CDR" + ] + } + }, + "required": [ + "chain" + ] + }, + "command": "python main.py -a liabilities -c '{{chain}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Submit sequences for multi-parameter optimization and return a job ID.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string" + }, + "csvLimit": { + "type": "integer" + }, + "sequenceName": { + "type": "string" + }, + "format": { + "type": "string", + "enum": [ + "vhh", + "conventional" + ] + }, + "vhhSequence": { + "type": "string" + }, + "heavyChain": { + "type": "string" + }, + "lightChain": { + "type": "string" + }, + "strategy": { + "type": "string", + "enum": [ + "aggressive", + "balanced", + "conservative" + ] + }, + "maxVariants": { + "type": "integer" + }, + "mutationDepth": { + "type": "integer" + }, + "excludeRegions": { + "type": "string" + }, + "includeOnlyRegions": { + "type": "string" + } + }, + "required": [ + "format" + ] + }, + "command": "python main.py -a optimize -f '{{format}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Check the status of an optimization job.", + "input_schema": { + "type": "object", + "properties": 
{ + "jobId": { + "type": "string", + "description": "Job identifier string from optimization submission." + } + }, + "required": [ + "jobId" + ] + }, + "command": "python main.py -a status --job-id '{{jobId}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Retrieve results for a completed optimization job.", + "input_schema": { + "type": "object", + "properties": { + "jobId": { + "type": "string", + "description": "Job identifier string from optimization submission." + } + }, + "required": [ + "jobId" + ] + }, + "command": "python main.py -a results --job-id '{{jobId}}'", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-17T13:44:17.1956309Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-17T13:44:17.1956309Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "MOO (Multi-Parameter Optimization) Tool provides antibody engineering capabilities for sequence validation, developability prediction, liability analysis, and optimization. It validates antibody sequences (VHH and conventional IgG), predicts developability metrics like expression and stability, analyzes manufacturability liabilities, and performs multi-parameter optimization to generate improved variants. 
Results include detailed scores, mutations, and recommendations for antibody design.", + "version": "1.0.0", + "category": "Scientific Computing", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "acrantibodydev.azurecr.io/moo-client-csv:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "4Gi", + "storage": "8Gi", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "8Gi", + "storage": "32Gi", + "gpu": "0" + }, + "infiniband": false, + "recommended_sku": [ + "Standard_D4s_v3" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Validate antibody sequences and return parsed data. Supports both VHH (single domain) and conventional IgG (heavy+light chain) formats. Returns validation status and parsed sequences with sequence lengths.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string", + "description": "(Optional) Path to a CSV file inside the container to build sequences from (e.g., /app/inputs/sequences.csv)." + }, + "csvLimit": { + "type": "integer", + "description": "(Optional) Only process the first N rows of the CSV." + }, + "outputDir": { + "type": "string", + "description": "(Optional) Output directory inside the container for annotated CSV outputs (default: ./output)." + }, + "outputFile": { + "type": "string", + "description": "(Optional) Output filename (or absolute path) for annotated CSV outputs." + }, + "sequenceName": { + "type": "string", + "description": "Name for the sequence (e.g., \"my-vhh\", \"my-igg\")." + }, + "format": { + "type": "string", + "enum": [ + "vhh", + "conventional" + ], + "description": "Sequence format: 'vhh' for single domain antibodies or 'conventional' for IgG with heavy and light chains." + }, + "vhhSequence": { + "type": "string", + "description": "VHH sequence string (required when format is 'vhh')." 
+ }, + "heavyChain": { + "type": "string", + "description": "Heavy chain sequence (required when format is 'conventional')." + }, + "lightChain": { + "type": "string", + "description": "Light chain sequence (required when format is 'conventional')." + } + }, + "required": [ + "format" + ] + }, + "command": "python main.py -a validate{{#if sequenceName}} -n '{{sequenceName}}'{{/if}} -f '{{format}}'{{#if csvFile}} --csv-file '{{csvFile}}'{{/if}}{{#if csvLimit}} --csv-limit {{csvLimit}}{{/if}}{{#if outputDir}} --output-dir '{{outputDir}}'{{/if}}{{#if outputFile}} --output-file '{{outputFile}}'{{/if}}{{#if vhhSequence}} --vhh-sequence '{{vhhSequence}}'{{/if}}{{#if heavyChain}} --heavy-chain '{{heavyChain}}'{{/if}}{{#if lightChain}} --light-chain '{{lightChain}}'{{/if}}", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Predict sequence developability metrics including expression, HIC retention, ECM, and BV.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string", + "description": "Path to a CSV file inside the container to build sequences from." + }, + "format": { + "type": "string", + "description": "Sequence format: 'vhh' or 'conventional'." + } + }, + "required": [ + "format", + "csvFile" + ] + }, + "command": "python main.py -a developability -f '{{format}}' --csv-file '{{csvFile}}'", + "infra_node": "worker", + "output_mount_configurations": [ + { + "mount_path": "/app/outputs/", + "auto_promote": true, + "output_name": "DevelopabilityResults", + "output_description": "The results of developability predictions including expression, HIC retention, ECM, and BV metrics." 
+ } + ] + }, + { + "name": "Sanitized", + "description": "Analyze sequences for potential manufacturability liabilities such as oxidation sites, aggregation risks, and structural issues.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string" + }, + "csvLimit": { + "type": "integer" + }, + "outputDir": { + "type": "string" + }, + "outputFile": { + "type": "string" + }, + "liabilitiesSeqColumn": { + "type": "string" + }, + "sequence": { + "type": "string" + }, + "chain": { + "type": "string", + "enum": [ + "vhh", + "heavy", + "light" + ] + }, + "includeStructural": { + "type": "boolean" + }, + "scheme": { + "type": "string" + }, + "filterGermline": { + "type": "boolean" + }, + "regionScope": { + "type": "string", + "enum": [ + "ALL", + "CDR" + ] + } + }, + "required": [ + "chain" + ] + }, + "command": "python main.py -a liabilities -c '{{chain}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Submit sequences for multi-parameter optimization and return a job ID.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string" + }, + "csvLimit": { + "type": "integer" + }, + "sequenceName": { + "type": "string" + }, + "format": { + "type": "string", + "enum": [ + "vhh", + "conventional" + ] + }, + "vhhSequence": { + "type": "string" + }, + "heavyChain": { + "type": "string" + }, + "lightChain": { + "type": "string" + }, + "strategy": { + "type": "string", + "enum": [ + "aggressive", + "balanced", + "conservative" + ] + }, + "maxVariants": { + "type": "integer" + }, + "mutationDepth": { + "type": "integer" + }, + "excludeRegions": { + "type": "string" + }, + "includeOnlyRegions": { + "type": "string" + } + }, + "required": [ + "format" + ] + }, + "command": "python main.py -a optimize -f '{{format}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Check the status of an optimization job.", + "input_schema": { + "type": "object", + "properties": 
{ + "jobId": { + "type": "string", + "description": "Job identifier string from optimization submission." + } + }, + "required": [ + "jobId" + ] + }, + "command": "python main.py -a status --job-id '{{jobId}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Retrieve results for a completed optimization job.", + "input_schema": { + "type": "object", + "properties": { + "jobId": { + "type": "string", + "description": "Job identifier string from optimization submission." + } + }, + "required": [ + "jobId" + ] + }, + "command": "python main.py -a results --job-id '{{jobId}}'", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-17T13:50:19.5978763Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-17T13:50:19.5978763Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "MOO (Multi-Parameter Optimization) Tool provides antibody engineering capabilities for sequence validation, developability prediction, liability analysis, and optimization. It validates antibody sequences (VHH and conventional IgG), predicts developability metrics like expression and stability, analyzes manufacturability liabilities, and performs multi-parameter optimization to generate improved variants. 
Results include detailed scores, mutations, and recommendations for antibody design.", + "version": "1.0.0", + "category": "Scientific Computing", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "acrantibodydev.azurecr.io/moo-client-csv:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "4Gi", + "storage": "8Gi", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "8Gi", + "storage": "32Gi", + "gpu": "0" + }, + "infiniband": false, + "recommended_sku": [ + "Standard_D4s_v3" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Validate antibody sequences and return parsed data. Supports both VHH (single domain) and conventional IgG (heavy+light chain) formats. Returns validation status and parsed sequences with sequence lengths.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string", + "description": "(Optional) Path to a CSV file inside the container to build sequences from (e.g., /app/inputs/sequences.csv)." + }, + "csvLimit": { + "type": "integer", + "description": "(Optional) Only process the first N rows of the CSV." + }, + "outputDir": { + "type": "string", + "description": "(Optional) Output directory inside the container for annotated CSV outputs (default: ./output)." + }, + "outputFile": { + "type": "string", + "description": "(Optional) Output filename (or absolute path) for annotated CSV outputs." + }, + "sequenceName": { + "type": "string", + "description": "Name for the sequence (e.g., \"my-vhh\", \"my-igg\")." + }, + "format": { + "type": "string", + "enum": [ + "vhh", + "conventional" + ], + "description": "Sequence format: 'vhh' for single domain antibodies or 'conventional' for IgG with heavy and light chains." + }, + "vhhSequence": { + "type": "string", + "description": "VHH sequence string (required when format is 'vhh')." 
+ }, + "heavyChain": { + "type": "string", + "description": "Heavy chain sequence (required when format is 'conventional')." + }, + "lightChain": { + "type": "string", + "description": "Light chain sequence (required when format is 'conventional')." + } + }, + "required": [ + "format" + ] + }, + "command": "python main.py -a validate{{#if sequenceName}} -n '{{sequenceName}}'{{/if}} -f '{{format}}'{{#if csvFile}} --csv-file '{{csvFile}}'{{/if}}{{#if csvLimit}} --csv-limit {{csvLimit}}{{/if}}{{#if outputDir}} --output-dir '{{outputDir}}'{{/if}}{{#if outputFile}} --output-file '{{outputFile}}'{{/if}}{{#if vhhSequence}} --vhh-sequence '{{vhhSequence}}'{{/if}}{{#if heavyChain}} --heavy-chain '{{heavyChain}}'{{/if}}{{#if lightChain}} --light-chain '{{lightChain}}'{{/if}}", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Predict sequence developability metrics including expression, HIC retention, ECM, and BV.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string", + "description": "Path to a CSV file inside the container to build sequences from." + }, + "format": { + "type": "string", + "description": "Sequence format: 'vhh' or 'conventional'." + } + }, + "required": [ + "format", + "csvFile" + ] + }, + "command": "python main.py -a developability -f '{{format}}' --csv-file '{{csvFile}}'", + "infra_node": "worker", + "output_mount_configurations": [ + { + "mount_path": "/app/outputs/", + "auto_promote": true, + "output_name": "DevelopabilityResults", + "output_description": "The results of developability predictions including expression, HIC retention, ECM, and BV metrics." 
+ } + ] + }, + { + "name": "Sanitized", + "description": "Analyze sequences for potential manufacturability liabilities such as oxidation sites, aggregation risks, and structural issues.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string" + }, + "csvLimit": { + "type": "integer" + }, + "outputDir": { + "type": "string" + }, + "outputFile": { + "type": "string" + }, + "liabilitiesSeqColumn": { + "type": "string" + }, + "sequence": { + "type": "string" + }, + "chain": { + "type": "string", + "enum": [ + "vhh", + "heavy", + "light" + ] + }, + "includeStructural": { + "type": "boolean" + }, + "scheme": { + "type": "string" + }, + "filterGermline": { + "type": "boolean" + }, + "regionScope": { + "type": "string", + "enum": [ + "ALL", + "CDR" + ] + } + }, + "required": [ + "chain" + ] + }, + "command": "python main.py -a liabilities -c '{{chain}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Submit sequences for multi-parameter optimization and return a job ID.", + "input_schema": { + "type": "object", + "properties": { + "csvFile": { + "type": "string" + }, + "csvLimit": { + "type": "integer" + }, + "sequenceName": { + "type": "string" + }, + "format": { + "type": "string", + "enum": [ + "vhh", + "conventional" + ] + }, + "vhhSequence": { + "type": "string" + }, + "heavyChain": { + "type": "string" + }, + "lightChain": { + "type": "string" + }, + "strategy": { + "type": "string", + "enum": [ + "aggressive", + "balanced", + "conservative" + ] + }, + "maxVariants": { + "type": "integer" + }, + "mutationDepth": { + "type": "integer" + }, + "excludeRegions": { + "type": "string" + }, + "includeOnlyRegions": { + "type": "string" + } + }, + "required": [ + "format" + ] + }, + "command": "python main.py -a optimize -f '{{format}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Check the status of an optimization job.", + "input_schema": { + "type": "object", + "properties": 
{ + "jobId": { + "type": "string", + "description": "Job identifier string from optimization submission." + } + }, + "required": [ + "jobId" + ] + }, + "command": "python main.py -a status --job-id '{{jobId}}'", + "infra_node": "worker" + }, + { + "name": "Sanitized", + "description": "Retrieve results for a completed optimization job.", + "input_schema": { + "type": "object", + "properties": { + "jobId": { + "type": "string", + "description": "Job identifier string from optimization submission." + } + }, + "required": [ + "jobId" + ] + }, + "command": "python main.py -a results --job-id '{{jobId}}'", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-26T02:02:25.3606852Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-26T02:02:25.3606852Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This is the coding tool for CorePython cheminformatics operations.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "infra_type": "container", + "name": "Sanitized", + "image": { + "acr": "demodiscoveryacr.azurecr.io/corepython:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "2Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "4Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "code_environments": [ + { + "language": "python", + "command": "python3 \"/{{ scriptName }}\"", + "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as 
general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T21:02:33.801961Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T21:02:33.801961Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings.", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "eastus2euap", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-27T21:13:22.6154699Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-27T21:13:22.6154699Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This is the coding tool for CorePython cheminformatics operations.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "infra_type": "container", + "name": "Sanitized", + "image": { + "acr": "demodiscoveryacr.azurecr.io/corepython:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "2Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "4Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "code_environments": [ + { + "language": "python", + "command": "python3 \"/{{ scriptName }}\"", + "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. 
Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "eastus2", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2025-08-22T17:39:43.5552623Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2025-08-22T18:10:26.5363936Z" + }, + "properties": { + "environmentVariables": {}, + "version": "0.0.1", + "definitionContent": { + "name": "Sanitized", + "version": "1.0", + "infra": [ + { + "name": "Sanitized", + "image": { + "acr": "acrbslftestprod.azurecr.io/bookshelf-indexing-service:3902098" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "4Gi", + "gpu": 0 + }, + "max_resources": { + "cpu": "4", + "ram": "8Gi", + "gpu": 0 + } + } + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "eastus2", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2025-08-22T17:42:21.2183803Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2025-08-22T17:42:21.2183803Z" + }, + "properties": { + "environmentVariables": {}, + "version": "0.0.1", + "definitionContent": { + "name": "Sanitized", + "version": "1.0", + "infra": [ + { + "name": "Sanitized", + "image": { + "acr": "acrbslftestprod.azurecr.io/bookshelf-indexing-service-canary:3908498" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "4Gi", + "gpu": 0 + }, + "max_resources": { + "cpu": "4", + "ram": "8Gi", + "gpu": 0 + } + } + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": 
"microsoft.discovery/tools", + "location": "eastus", + "tags": {}, + "systemData": { + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-26T02:07:20.087512Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This is the coding tool for CorePython cheminformatics operations.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "infra_type": "container", + "name": "Sanitized", + "image": { + "acr": "demodiscoveryacr.azurecr.io/corepython:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "2Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "4Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "code_environments": [ + { + "language": "python", + "command": "python3 \"/{{ scriptName }}\"", + "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. 
Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "eastus", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-27T21:16:19.3888378Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-27T21:16:19.3888378Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "This is the coding tool for CorePython cheminformatics operations.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "infra_type": "container", + "name": "Sanitized", + "image": { + "acr": "demodiscoveryacr.azurecr.io/corepython:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "2Gi", + "gpu": "0", + "storage": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "4Gi", + "storage": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "code_environments": [ + { + "language": "python", + "command": "python3 \"/{{ scriptName }}\"", + "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. 
Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_update_tool.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_update_tool.json new file mode 100644 index 000000000000..787bfa358bc8 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_update_tool.json @@ -0,0 +1,121 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools/test-tool-50d87c62?api-version=2026-02-01-preview", + "RequestMethod": "PATCH", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "Content-Length": "46", + "Content-Type": "application/json", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": { + "tags": { + "SkipAutoDeleteTill": "2026-12-31" + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Content-Length": "1433", + "Content-Type": "application/json; charset=utf-8", + "Date": "Thu, 05 Mar 2026 15:29:58 GMT", + "ETag": "\"530098c4-0000-1000-0000-69a9a1760000\"", + "Expires": "-1", + "mise-correlation-id": "065b16fe-308e-4d0b-98a5-91dd76d0fe5b", + "Pragma": "no-cache", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "x-azure-ref": "Sanitized", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-build-version": "1", + "x-ms-correlation-request-id": "8030a5e8-13b0-4b17-8825-0c57884f221b", + "x-ms-operation-identifier": 
"tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/b7944970-25d4-42ad-a7b8-25a01a24a21e", + "x-ms-providerhub-traffic": "True", + "x-ms-ratelimit-remaining-subscription-writes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260305T152958Z:8030a5e8-13b0-4b17-8825-0c57884f221b", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 2FC92E67AAD94952BE7DD9011315FF1D Ref B: SN4AA2022303047 Ref C: 2026-03-05T15:29:55Z" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/tools", + "location": "uksouth", + "tags": { + "SkipAutoDeleteTill": "2026-12-31" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T21:02:33.801961Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-05T15:29:56.920121Z" + }, + "properties": { + "version": "1.0.0", + "definitionContent": { + "name": "Sanitized", + "description": "Molecular property prediction for single SMILES strings.", + "version": "1.0.0", + "category": "cheminformatics", + "license": "MIT", + "infra": [ + { + "name": "Sanitized", + "infra_type": "container", + "image": { + "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" + }, + "compute": { + "min_resources": { + "cpu": "1", + "ram": "1Gi", + "storage": "32", + "gpu": "0" + }, + "max_resources": { + "cpu": "2", + "ram": "1Gi", + "storage": "64", + "gpu": "0" + }, + "recommended_sku": [ + "Standard_D4s_v6" + ], + "pool_type": "static", + "pool_size": 1 + } + } + ], + "actions": [ + { + "name": "Sanitized", + "description": "Predict molecular properties for SMILES strings.", + "input_schema": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" + } + }, + "required": [ + "action" + ] + }, + "command": "python molpredictor.py --action {{ action }}", + "infra_node": "worker" + } + ] + }, + "environmentVariables": {}, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_create_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_create_workspace.json new file mode 100644 index 000000000000..6bfbe9e5df2f --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_create_workspace.json @@ -0,0 +1,97 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01?api-version=2026-02-01-preview", + "RequestMethod": "PUT", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)", + "Content-Type": "application/json", + "Content-Length": "1121" + }, + "RequestBody": { + "location": "uksouth", + "properties": { + "supercomputerIds": [], + "workspaceIdentity": { + "id": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity" + }, + "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default3", + "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "workspaceSubnetId": 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default2", + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyName": "discoverykey", + "keyVaultUri": "https://newapik.vault.azure.net/", + "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" + }, + "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", + "publicNetworkAccess": "Disabled" + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Content-Length": "1606", + "Content-Type": "application/json; charset=utf-8", + "Expires": "-1", + "ETag": "\"9a00d931-0000-1000-0000-69a88a160000\"", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-providerhub-traffic": "True", + "x-ms-request-id": "c4763dfb-9418-41c4-b9f8-8a3882a37254", + "x-ms-correlation-request-id": "653482e8-5fd8-4f48-acb5-01f2284cb594", + "x-ms-ratelimit-remaining-subscription-global-reads": "16499", + "x-ms-routing-request-id": "UKSOUTH:20260304T193912Z:653482e8-5fd8-4f48-acb5-01f2284cb594", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "X-MSEdge-Ref": "Ref A: CFE2594B44234894ABDC403EA188F3A8 Ref B: SN4AA2022302031 Ref C: 2026-03-04T19:39:12Z", + "Date": "Wed, 04 Mar 2026 19:39:11 GMT" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-04T19:36:15.7991238Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T19:36:15.7991238Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", + "supercomputerIds": [], + 
"workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", + "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" + }, + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyVaultUri": "https://newapik.vault.azure.net/", + "keyName": "discoverykey", + "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" + }, + "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_delete_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_delete_workspace.json new file mode 100644 index 000000000000..362dbba2ff64 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_delete_workspace.json @@ -0,0 +1,32 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-397d51cf?api-version=2026-02-01-preview", + "RequestMethod": "DELETE", + "RequestHeaders": { + "Accept": "*/*", + "Connection": "keep-alive", + "Content-Length": "0", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 204, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Date": "Wed, 04 Mar 2026 19:20:57 GMT", + "Expires": "-1", + "Pragma": "no-cache", + 
"Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Cache": "CONFIG_NOCACHE", + "X-Content-Type-Options": "nosniff", + "x-ms-correlation-request-id": "0ed3acad-a11e-4466-95c3-e80274ea0724", + "x-ms-ratelimit-remaining-subscription-deletes": "799", + "x-ms-routing-request-id": "EASTUS2EUAP:20260304T192058Z:0ed3acad-a11e-4466-95c3-e80274ea0724", + "x-ms-throttling-version": "v2", + "X-MSEdge-Ref": "Ref A: 822D8F24B73C45BEA4BC6953384D07AE Ref B: SN4AA2022302035 Ref C: 2026-03-04T19:20:55Z" + }, + "ResponseBody": null + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_get_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_get_workspace.json new file mode 100644 index 000000000000..ec8b131f353f --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_get_workspace.json @@ -0,0 +1,76 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Content-Length": "1607", + "Content-Type": "application/json; charset=utf-8", + "Expires": "-1", + "ETag": "\"0502323c-0000-1100-0000-69a8934a0000\"", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-providerhub-traffic": "True", + "x-ms-request-id": "1dcda016-4a3d-4ada-bdea-69e76ff62001", + "x-ms-correlation-request-id": "3539e8a3-d7d6-4658-843f-6162ac25641e", + 
"x-ms-ratelimit-remaining-subscription-global-reads": "16499", + "x-ms-routing-request-id": "UKSOUTH:20260304T203849Z:3539e8a3-d7d6-4658-843f-6162ac25641e", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "X-MSEdge-Ref": "Ref A: CF625DDD8A3B415C94D9D49490F08B77 Ref B: SN4AA2022305029 Ref C: 2026-03-04T20:38:49Z", + "Date": "Wed, 04 Mar 2026 20:38:49 GMT" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-04T19:36:15.7991238Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T19:36:15.7991238Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", + "supercomputerIds": [], + "workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", + "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" + }, + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyVaultUri": "https://newapik.vault.azure.net/", + "keyName": "discoverykey", + "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" + }, + "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_resource_group.json 
b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_resource_group.json new file mode 100644 index 000000000000..874819ed9d62 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_resource_group.json @@ -0,0 +1,80 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Content-Length": "1619", + "Content-Type": "application/json; charset=utf-8", + "Expires": "-1", + "x-ms-original-request-ids": "37bbdb9b-8704-4d62-bceb-8a6d5c04eace", + "x-ms-providerhub-traffic": "True", + "x-ms-correlation-request-id": "994eee78-4cf2-4c9f-abbb-d28d558c65a1", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-ratelimit-remaining-subscription-global-reads": "16499", + "x-ms-request-id": "994eee78-4cf2-4c9f-abbb-d28d558c65a1", + "x-ms-routing-request-id": "UKSOUTH:20260304T203847Z:994eee78-4cf2-4c9f-abbb-d28d558c65a1", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "X-MSEdge-Ref": "Ref A: A8F8C59D0E8F4F1BA565F48F60749B0B Ref B: SN4AA2022303045 Ref C: 2026-03-04T20:38:46Z", + "Date": "Wed, 04 Mar 2026 20:38:46 GMT" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": 
"User", + "createdAt": "2026-03-04T19:36:15.7991238Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T19:36:15.7991238Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", + "supercomputerIds": [], + "workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", + "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" + }, + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyVaultUri": "https://newapik.vault.azure.net/", + "keyName": "discoverykey", + "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" + }, + "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + } + ] + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_subscription.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_subscription.json new file mode 100644 index 000000000000..8e683a63e70e --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_subscription.json @@ -0,0 +1,366 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/workspaces?api-version=2026-02-01-preview", + "RequestMethod": "GET", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + 
"User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" + }, + "RequestBody": null, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Content-Length": "14176", + "Content-Type": "application/json; charset=utf-8", + "Expires": "-1", + "x-ms-original-request-ids": "e8db88e0-ee84-4bab-b52b-2192290fd359, 8ef79aee-3c20-4ded-a660-a37e3e5a3d8a, ee2f1d3d-51bf-4352-a382-280a8e3630e0", + "x-ms-ratelimit-remaining-subscription-reads": "1099", + "x-ms-ratelimit-remaining-subscription-global-reads": "16499", + "x-ms-request-id": "06af92d8-dc60-485d-8488-dcb9636118aa", + "x-ms-correlation-request-id": "06af92d8-dc60-485d-8488-dcb9636118aa", + "x-ms-routing-request-id": "SOUTHCENTRALUS:20260304T203845Z:06af92d8-dc60-485d-8488-dcb9636118aa", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "X-MSEdge-Ref": "Ref A: 6E4DD90850C44594B7ACC6D7947D373F Ref B: SN4AA2022301017 Ref C: 2026-03-04T20:38:44Z", + "Date": "Wed, 04 Mar 2026 20:38:44 GMT" + }, + "ResponseBody": { + "value": [ + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": { + "WorkspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "Application", + "createdAt": "2026-01-15T00:34:10.0773703Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "Application", + "lastModifiedAt": "2026-01-15T00:34:10.0773703Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-itworkrp114-4bncru", + "supercomputerIds": [ + "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/rp114-rg/providers/Microsoft.Discovery/supercomputers/itsuperp114" + ], + 
"workspaceApiUri": "https://itworkrp114.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/itworkrp114", + "workspaceIdentity": { + "id": "Sanitized" + }, + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uk south", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-19T05:22:08.6568882Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-19T05:22:08.6568882Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-wrksptest53-9ke163", + "supercomputerIds": [], + "workspaceApiUri": "https://wrksptest53.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wrksptest53", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "ed0545f1-159d-4851-9d51-38cace7712d7", + "clientId": "1764701a-3d97-4612-9134-bec5e4b4ae0e" + }, + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": { + "SkipAssociateKeyVaultToNsp": "true" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-25T13:40:46.4913501Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-25T13:40:46.4913501Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-wksp25021-9r5ibw", + "supercomputerIds": [], + "workspaceApiUri": "https://wksp25021.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wksp25021", + "workspaceIdentity": { + 
"id": "Sanitized", + "principalId": "76442198-a696-4763-a8da-c63b99d25643", + "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" + }, + "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/agentSubnet", + "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/PESubnet", + "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": { + "SkipAssociateKeyVaultToNsp": "true" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-02T20:01:56.834174Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-02T20:01:56.834174Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-wksp03031-2788mh", + "supercomputerIds": [], + "workspaceApiUri": "https://wksp03031.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wksp03031", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "76442198-a696-4763-a8da-c63b99d25643", + "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" + }, + "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/agentSubnet8", + "privateEndpointSubnetId": 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/PESubnet2", + "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": { + "SkipAssociateKeyVaultToNsp": "true" + }, + "systemData": { + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T11:23:08.6609326Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-wksp04031-gaebre", + "supercomputerIds": [], + "workspaceApiUri": "https://wksp04031.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wksp04031", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "76442198-a696-4763-a8da-c63b99d25643", + "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" + }, + "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/agentSubnet8", + "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/PESubnet2", + "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": 
"Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": { + "SkipAssociateKeyVaultToNsp": "true", + "networkIsolation": "true" + }, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-04T15:12:31.9121216Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T15:12:31.9121216Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-wksp04033-bs476h", + "supercomputerIds": [], + "workspaceApiUri": "https://wksp04033.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wksp04033", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "76442198-a696-4763-a8da-c63b99d25643", + "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" + }, + "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/agentSubnet9", + "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/PESubnet2", + "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Failed" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-04T19:36:15.7991238Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T19:36:15.7991238Z" + }, + "properties": { + 
"managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", + "supercomputerIds": [], + "workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", + "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" + }, + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyVaultUri": "https://newapik.vault.azure.net/", + "keyName": "discoverykey", + "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" + }, + "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "centraluseuap", + "tags": {}, + "systemData": { + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-02-04T09:19:20.1363722Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-wrksptest45-4rxpj0", + "supercomputerIds": [], + "workspaceApiUri": "https://wrksptest45.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wrksptest45", + "workspaceIdentity": { + "id": "Sanitized" + }, + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Succeeded" + } + }, + { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "eastus", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-02-26T00:24:42.4610232Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + 
"lastModifiedAt": "2026-02-26T00:24:42.4610232Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-testwseusalch-69oxho", + "supercomputerIds": [], + "workspaceApiUri": "https://testwseusalch.workspace.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/testwseusalch", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "76442198-a696-4763-a8da-c63b99d25643", + "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" + }, + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + "provisioningState": "Failed" + } + } + ] + } + } + ], + "Variables": {} +} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_update_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_update_workspace.json new file mode 100644 index 000000000000..9fc4744b029a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_update_workspace.json @@ -0,0 +1,84 @@ +{ + "Entries": [ + { + "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01?api-version=2026-02-01-preview", + "RequestMethod": "PATCH", + "RequestHeaders": { + "Accept": "application/json", + "Connection": "keep-alive", + "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)", + "Content-Type": "application/json", + "Content-Length": "105" + }, + "RequestBody": { + "properties": { + "keyVaultProperties": { + "keyName": "discoverykey", + "keyVersion": "956de2fc802f49eba81ddcc348ebc27c" + } + } + }, + "StatusCode": 200, + "ResponseHeaders": { + "Cache-Control": "no-cache", + "Pragma": "no-cache", + "Content-Length": "1606", + "Content-Type": "application/json; charset=utf-8", + "Expires": 
"-1", + "ETag": "\"9c009492-0000-1000-0000-69a89c8d0000\"", + "x-ms-ratelimit-remaining-subscription-writes": "799", + "x-ms-providerhub-traffic": "True", + "x-ms-request-id": "cf2c1ada-fb0d-40fb-b828-62e1f202bc62", + "x-ms-correlation-request-id": "45f38a46-23f0-4c4b-bd4b-3bf715f1ce71", + "x-ms-routing-request-id": "EASTUS2EUAP:20260304T205645Z:45f38a46-23f0-4c4b-bd4b-3bf715f1ce71", + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + "X-Content-Type-Options": "nosniff", + "X-Cache": "CONFIG_NOCACHE", + "X-MSEdge-Ref": "Ref A: A7C13ADCCB2641748C4748520B0E5448 Ref B: SN4AA2022305021 Ref C: 2026-03-04T20:56:43Z", + "Date": "Wed, 04 Mar 2026 20:56:45 GMT" + }, + "ResponseBody": { + "id": "Sanitized", + "name": "Sanitized", + "type": "microsoft.discovery/workspaces", + "location": "uksouth", + "tags": {}, + "systemData": { + "createdBy": "Sanitized", + "createdByType": "User", + "createdAt": "2026-03-04T19:36:15.7991238Z", + "lastModifiedBy": "Sanitized", + "lastModifiedByType": "User", + "lastModifiedAt": "2026-03-04T20:56:44.2310241Z" + }, + "properties": { + "managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", + "supercomputerIds": [], + "workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", + "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", + "workspaceIdentity": { + "id": "Sanitized", + "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", + "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" + }, + "customerManagedKeys": "Enabled", + "keyVaultProperties": { + "keyVaultUri": "https://newapik.vault.azure.net/", + "keyName": "discoverykey", + "keyVersion": "956de2fc802f49eba81ddcc348ebc27c" + }, + "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", + "managedOnBehalfOfConfiguration": { + "moboBrokerResources": [ + { + "id": "Sanitized" + } + ] + }, + 
"provisioningState": "Succeeded" + } + } + } + ], + "Variables": {} +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py b/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py new file mode 100644 index 000000000000..6456b9bac60a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py @@ -0,0 +1,69 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for Bookshelves operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase, AZURE_RESOURCE_GROUP + + +class TestBookshelves(DiscoveryMgmtTestCase): + """Tests for Bookshelves operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryClient) + self.resource_group = AZURE_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_bookshelves_by_subscription(self): + """Test listing bookshelves in the subscription.""" + bookshelves = list(self.client.bookshelves.list_by_subscription()) + assert isinstance(bookshelves, list) + + @recorded_by_proxy + def test_list_bookshelves_by_resource_group(self): + """Test listing bookshelves in a resource group.""" + bookshelves = list(self.client.bookshelves.list_by_resource_group(self.resource_group)) + assert isinstance(bookshelves, list) + @recorded_by_proxy + def test_get_bookshelf(self): + """Test getting a specific bookshelf by name.""" + bookshelf = self.client.bookshelves.get(self.resource_group, "test-bookshelf-05fbc43d") + assert bookshelf is not None + assert hasattr(bookshelf, "name") + assert hasattr(bookshelf, "location") + @recorded_by_proxy + def test_create_bookshelf(self): + """Test creating a bookshelf.""" + bookshelf_data = {"location": "uksouth"} + operation = self.client.bookshelves.begin_create_or_update( + resource_group_name="olawal", 
+ bookshelf_name="test-bookshelf-324938be", + resource=bookshelf_data, + ) + bookshelf = operation.result() + assert bookshelf is not None + @recorded_by_proxy + def test_update_bookshelf(self): + """Test updating a bookshelf.""" + bookshelf_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.bookshelves.begin_update( + resource_group_name="olawal", + bookshelf_name="test-bookshelf-05fbc43d", + properties=bookshelf_data, + ) + updated_bookshelf = operation.result() + assert updated_bookshelf is not None + @recorded_by_proxy + def test_delete_bookshelf(self): + """Test deleting a bookshelf.""" + operation = self.client.bookshelves.begin_delete( + resource_group_name="olawal", + bookshelf_name="test-bookshelf-9379e896", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py b/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py new file mode 100644 index 000000000000..4a7aac393d7b --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py @@ -0,0 +1,64 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +"""Tests for ChatModelDeployments operations.""" +from azure.mgmt.discovery import DiscoveryClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +# Resource group that has a workspace +WORKSPACE_RESOURCE_GROUP = "olawal" +WORKSPACE_NAME = "test-wrksp-create01" + + +class TestChatModelDeployments(DiscoveryMgmtTestCase): + """Tests for ChatModelDeployments operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryClient) + self.resource_group = WORKSPACE_RESOURCE_GROUP + self.workspace_name = WORKSPACE_NAME + + @recorded_by_proxy + def test_list_chat_model_deployments_by_workspace(self): + """Test listing chat model deployments in a workspace.""" + deployments = list( + self.client.chat_model_deployments.list_by_workspace(self.resource_group, self.workspace_name) + ) + assert isinstance(deployments, list) + @recorded_by_proxy + def test_get_chat_model_deployment(self): + """Test getting a specific chat model deployment by name.""" + deployment = self.client.chat_model_deployments.get(self.resource_group, self.workspace_name, "test-deploy-chatmodel01") + assert deployment is not None + assert hasattr(deployment, "name") + @recorded_by_proxy + def test_create_chat_model_deployment(self): + """Test creating a chat model deployment.""" + deployment_data = { + "location": "uksouth", + "properties": { + "modelFormat": "OpenAI", + "modelName": "gpt-4o" + } + } + operation = self.client.chat_model_deployments.begin_create_or_update( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + chat_model_deployment_name="test-deploy-chatmodel01", + resource=deployment_data, + ) + deployment = operation.result() + assert deployment is not None + @recorded_by_proxy + def test_delete_chat_model_deployment(self): + """Test deleting a chat model deployment.""" + operation = 
self.client.chat_model_deployments.begin_delete( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + chat_model_deployment_name="test-deploy-chatmodel01", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_hero_scenario.py b/sdk/discovery/azure-mgmt-discovery/tests/test_hero_scenario.py new file mode 100644 index 000000000000..37d3810e6a44 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_hero_scenario.py @@ -0,0 +1,364 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +""" +Hero Scenario Test: Run a Tool on Supercomputer + +This test demonstrates the complete end-to-end flow for the Discovery service: +1. Create a Workspace (ARM) +2. Create a Project in the workspace (ARM) +3. Create an Investigation in the workspace (Workspace client) +4. Run a Tool on Supercomputer (Workspace client) - THE HERO! +5. Check Run Status and wait for completion (Workspace client) +6. Query results from KnowledgeBase (Bookshelf client) + +This scenario requires real Azure resources and is intended to be run +in record mode to generate recordings for CI playback. + +HERO SCENARIO: "Run a Tool on Supercomputer" +This is the primary use case for the Discovery service - executing +scientific computing tools on Azure supercomputers. 
+""" +import os +import uuid +import pytest +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + + +# Test configuration +AZURE_LOCATION = os.environ.get("AZURE_LOCATION", "eastus") +AZURE_RESOURCE_GROUP = os.environ.get("AZURE_RESOURCE_GROUP", "olawal") +SUPERCOMPUTER_NAME = os.environ.get("SUPERCOMPUTER_NAME", "test-supercomputer") + + +class TestHeroScenario(AzureMgmtRecordedTestCase): + """ + Hero Scenario: Run a Tool on Supercomputer + + This test class validates the complete end-to-end Discovery workflow + using all three SDK clients: + + ARM Client (azure-mgmt-discovery): + - Create/manage Workspace + - Create/manage Project + - Access Supercomputer + + Workspace Client (azure-discovery-workspace): + - Create Investigation + - Run Tool on Supercomputer + - Monitor Run Status + - Manage Tasks + + Bookshelf Client (azure-discovery-bookshelf): + - Query KnowledgeBase for insights + - Search results + """ + + def setup_method(self, method): + """Set up test resources.""" + self.test_run_id = str(uuid.uuid4())[:8] + self.workspace_name = f"test-workspace-{self.test_run_id}" + self.project_name = f"test-project-{self.test_run_id}" + self.investigation_name = f"test-investigation-{self.test_run_id}" + + def create_mgmt_client(self): + """Create the ARM management client.""" + from azure.mgmt.discovery import DiscoveryMgmtClient + + return self.create_mgmt_client(DiscoveryMgmtClient) + + # ========================================================================= + # UNIT TESTS - Validate API Surface + # ========================================================================= + + def test_arm_client_has_workspace_operations(self): + """Validate ARM client exposes workspace operations.""" + from azure.mgmt.discovery import DiscoveryClient + + # Just verify the class structure - no actual API calls + assert hasattr(DiscoveryClient, "__init__") + # The client should have workspaces property when instantiated + + def 
test_arm_client_has_project_operations(self): + """Validate ARM client exposes project operations.""" + from azure.mgmt.discovery import DiscoveryClient + + assert hasattr(DiscoveryClient, "__init__") + + def test_arm_client_has_supercomputer_operations(self): + """Validate ARM client exposes supercomputer operations.""" + from azure.mgmt.discovery import DiscoveryClient + + assert hasattr(DiscoveryClient, "__init__") + + def test_workspace_client_has_investigation_operations(self): + """Validate Workspace client exposes investigation operations.""" + from azure.ai.discovery import WorkspaceClient + + assert hasattr(WorkspaceClient, "__init__") + + def test_workspace_client_has_tools_operations(self): + """Validate Workspace client exposes tools operations for running on supercomputer.""" + from azure.ai.discovery import WorkspaceClient + + assert hasattr(WorkspaceClient, "__init__") + + def test_bookshelf_client_has_knowledge_base_operations(self): + """Validate Bookshelf client exposes knowledge base operations.""" + from azure.ai.discovery import BookshelfClient + + assert hasattr(BookshelfClient, "__init__") + + # ========================================================================= + # HERO SCENARIO FLOW DOCUMENTATION + # ========================================================================= + + def test_hero_scenario_flow_documentation(self): + """ + Document the complete hero scenario flow. + + This test serves as executable documentation of the 10-step + hero scenario for running a tool on a supercomputer. 
+ """ + hero_scenario_steps = [ + { + "step": 1, + "name": "Create Workspace", + "client": "ARM (DiscoveryMgmtClient)", + "operation": "workspaces.begin_create_or_update", + "description": "Create an Azure Discovery Workspace to organize resources", + }, + { + "step": 2, + "name": "Create Project", + "client": "ARM (DiscoveryMgmtClient)", + "operation": "projects.begin_create_or_update", + "description": "Create a Project within the Workspace for logical grouping", + }, + { + "step": 3, + "name": "Get Supercomputer", + "client": "ARM (DiscoveryMgmtClient)", + "operation": "supercomputers.get", + "description": "Get reference to an existing Supercomputer for compute", + }, + { + "step": 4, + "name": "Get Node Pool", + "client": "ARM (DiscoveryMgmtClient)", + "operation": "node_pools.list_by_supercomputer", + "description": "Get available node pools for running tools", + }, + { + "step": 5, + "name": "Get Tool Definition", + "client": "ARM (DiscoveryMgmtClient)", + "operation": "tools.get", + "description": "Get the tool to run (e.g., molecular dynamics simulation)", + }, + { + "step": 6, + "name": "Create Investigation", + "client": "Workspace (WorkspaceClient)", + "operation": "investigations.create_or_update", + "description": "Create an Investigation to track the scientific workflow", + }, + { + "step": 7, + "name": "Run Tool on Supercomputer", + "client": "Workspace (WorkspaceClient)", + "operation": "tools.begin_run", + "description": "THE HERO - Execute the tool on supercomputer nodes", + }, + { + "step": 8, + "name": "Monitor Run Status", + "client": "Workspace (WorkspaceClient)", + "operation": "tools.get_run_status", + "description": "Poll for completion of the tool run", + }, + { + "step": 9, + "name": "Create Task for Results", + "client": "Workspace (WorkspaceClient)", + "operation": "tasks.create", + "description": "Create a task to process and analyze results", + }, + { + "step": 10, + "name": "Query Knowledge Base", + "client": "Bookshelf 
(BookshelfClient)", + "operation": "knowledge_base_versions.search", + "description": "Search knowledge base for insights from the run", + }, + ] + + # Validate all steps are documented + assert len(hero_scenario_steps) == 10, "Hero scenario has 10 steps" + + # Validate step structure + for step in hero_scenario_steps: + assert "step" in step, "step number" + assert "name" in step, "step name" + assert "client" in step, "client name" + assert "operation" in step, "operation name" + assert "description" in step, "description" + + # Print the flow for documentation + print("\n=== HERO SCENARIO: Run Tool on Supercomputer ===\n") + for step in hero_scenario_steps: + print(f"Step {step['step']}: {step['name']}") + print(f" Client: {step['client']}") + print(f" Operation: {step['operation']}") + print(f" {step['description']}\n") + + # ========================================================================= + # RECORDED INTEGRATION TESTS + # ========================================================================= + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_step1_create_workspace(self): + """Step 1: Create a Workspace via ARM.""" + from azure.mgmt.discovery import DiscoveryMgmtClient + + client = self.create_mgmt_client(DiscoveryMgmtClient) + + # Create workspace + poller = client.workspaces.begin_create_or_update( + resource_group_name=AZURE_RESOURCE_GROUP, + workspace_name=self.workspace_name, + resource={"location": AZURE_LOCATION, "properties": {}}, + ) + workspace = poller.result() + + assert workspace is not None + assert workspace.name == self.workspace_name + assert workspace.location == AZURE_LOCATION + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_step2_create_project(self): + """Step 2: Create a Project in the Workspace via ARM.""" + from azure.mgmt.discovery import DiscoveryMgmtClient + + client = self.create_mgmt_client(DiscoveryMgmtClient) + + # Create project + poller = 
client.projects.begin_create_or_update( + resource_group_name=AZURE_RESOURCE_GROUP, + workspace_name=self.workspace_name, + project_name=self.project_name, + resource={"location": AZURE_LOCATION, "properties": {}}, + ) + project = poller.result() + + assert project is not None + assert project.name == self.project_name + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_step3_verify_supercomputer(self): + """Step 3: Verify Supercomputer exists.""" + from azure.mgmt.discovery import DiscoveryMgmtClient + + client = self.create_mgmt_client(DiscoveryMgmtClient) + + # Try to get supercomputer + try: + supercomputer = client.supercomputers.get( + resource_group_name=AZURE_RESOURCE_GROUP, + supercomputer_name=SUPERCOMPUTER_NAME, + ) + assert supercomputer is not None + assert supercomputer.name == SUPERCOMPUTER_NAME + except Exception: + # If not found, list available supercomputers + supercomputers = list(client.supercomputers.list_by_subscription()) + print(f"Available supercomputers: {[s.name for s in supercomputers]}") + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_step6_create_investigation(self): + """Step 6: Create an Investigation via Workspace client.""" + from azure.ai.discovery import WorkspaceClient + + workspace_endpoint = os.environ.get( + "AZURE_DISCOVERY_WORKSPACE_ENDPOINT", "https://test.workspace.discovery.azure.com" + ) + client = WorkspaceClient(endpoint=workspace_endpoint, credential=self.get_credential(WorkspaceClient)) + + # Create investigation + investigation = client.investigations.create_or_update( + investigation_id=self.investigation_name, + body={"name": self.investigation_name, "description": "Hero scenario test investigation"}, + ) + + assert investigation is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_step7_run_tool_on_supercomputer(self): + """ + Step 7: THE HERO - Run Tool on Supercomputer + + This is the core hero scenario - executing a 
scientific tool + on Azure supercomputer infrastructure. + + Prerequisites: + 1. Valid tool_id configured in the workspace + 2. Available node_pool_ids for compute allocation + 3. Tool runs consume compute resources (billable) + """ + from azure.ai.discovery import WorkspaceClient + + workspace_endpoint = os.environ.get( + "AZURE_DISCOVERY_WORKSPACE_ENDPOINT", "https://test.workspace.discovery.azure.com" + ) + client = WorkspaceClient(endpoint=workspace_endpoint, credential=self.get_credential(WorkspaceClient)) + + # Run tool on supercomputer + tool_id = os.environ.get("TOOL_ID", "test-tool") + node_pool_id = os.environ.get("NODE_POOL_ID", "test-node-pool") + + poller = client.tools.begin_run( + body={ + "toolId": tool_id, + "nodePoolIds": [node_pool_id], + "parameters": {"input_file": "/data/input.dat", "output_dir": "/data/output"}, + } + ) + run_result = poller.result() + + assert run_result is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_step8_monitor_run_status(self): + """Step 8: Monitor Tool Run Status.""" + from azure.ai.discovery import WorkspaceClient + + workspace_endpoint = os.environ.get( + "AZURE_DISCOVERY_WORKSPACE_ENDPOINT", "https://test.workspace.discovery.azure.com" + ) + client = WorkspaceClient(endpoint=workspace_endpoint, credential=self.get_credential(WorkspaceClient)) + + run_id = os.environ.get("RUN_ID", "test-run-id") + status = client.tools.get_run_status(run_id=run_id) + + assert status is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_step10_query_knowledge_base(self): + """Step 10: Query Knowledge Base for insights.""" + from azure.ai.discovery import BookshelfClient + + bookshelf_endpoint = os.environ.get( + "AZURE_DISCOVERY_BOOKSHELF_ENDPOINT", "https://test.bookshelf.discovery.azure.com" + ) + client = BookshelfClient(endpoint=bookshelf_endpoint, credential=self.get_credential(BookshelfClient)) + + knowledge_base_name = os.environ.get("KNOWLEDGE_BASE_NAME", 
"test-kb") + version = os.environ.get("KNOWLEDGE_BASE_VERSION", "1") + + # Search the knowledge base + results = client.knowledge_base_versions.search( + name=knowledge_base_name, version=version, body={"query": "simulation results", "top": 10} + ) + + assert results is not None diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py b/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py new file mode 100644 index 000000000000..0aa7a40dfc08 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py @@ -0,0 +1,86 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for NodePools operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + +# Resource group and supercomputer that contain node pools +NODE_POOL_RESOURCE_GROUP = "olawal" +NODE_POOL_SUPERCOMPUTER_NAME = "itsuperp114" + + +class TestNodePools(DiscoveryMgmtTestCase): + """Tests for NodePools operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryClient) + self.resource_group = NODE_POOL_RESOURCE_GROUP + + @recorded_by_proxy + def test_list_node_pools_by_supercomputer(self): + """Test listing node pools in a supercomputer.""" + node_pools = list(self.client.node_pools.list_by_supercomputer("rp114-rg", NODE_POOL_SUPERCOMPUTER_NAME)) + assert isinstance(node_pools, list) + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_node_pool(self): + """Test getting a specific node pool by name.""" + supercomputer_name = "test-supercomputer" + node_pool = self.client.node_pools.get(self.resource_group, supercomputer_name, "test-nodepool") + assert node_pool is not None + assert hasattr(node_pool, "name") + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + 
def test_create_node_pool(self): + """Test creating a node pool.""" + supercomputer_name = "test-sc-2bbb25b8" + node_pool_data = { + "location": "uksouth", + "properties": { + "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", + "vmSize": "Standard_D4s_v6", + "maxNodeCount": 3, + "minNodeCount": 1, + "scaleSetPriority": "Regular" + } + } + operation = self.client.node_pools.begin_create_or_update( + resource_group_name="olawal", + supercomputer_name=supercomputer_name, + node_pool_name="test-np-568f7883", + resource=node_pool_data, + ) + node_pool = operation.result() + assert node_pool is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_update_node_pool(self): + """Test updating a node pool.""" + supercomputer_name = "test-supercomputer" + node_pool_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.node_pools.begin_create_or_update( + resource_group_name=self.resource_group, + supercomputer_name=supercomputer_name, + node_pool_name="test-nodepool", + resource=node_pool_data, + ) + updated_node_pool = operation.result() + assert updated_node_pool is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_delete_node_pool(self): + """Test deleting a node pool.""" + supercomputer_name = "test-supercomputer" + operation = self.client.node_pools.begin_delete( + resource_group_name=self.resource_group, + supercomputer_name=supercomputer_name, + node_pool_name="nodepool-to-delete", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py b/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py new file mode 100644 index 000000000000..512e0fd48d1a --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py @@ -0,0 +1,24 @@ +# pylint: disable=line-too-long,useless-suppression +# 
------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for Operations operations.""" +from azure.mgmt.discovery import DiscoveryClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +class TestOperations(DiscoveryMgmtTestCase): + """Tests for Operations operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryClient) + + @recorded_by_proxy + def test_list_operations(self): + """Test listing available API operations.""" + operations = list(self.client.operations.list()) + assert len(operations) > 0 + assert hasattr(operations[0], "name") diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py b/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py new file mode 100644 index 000000000000..01ceead5a2c2 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py @@ -0,0 +1,170 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +"""Tests for Private Endpoint related operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +# Resource group and resources for testing +WORKSPACE_RESOURCE_GROUP = "olawal" +WORKSPACE_NAME = "wrksptest44" +BOOKSHELF_NAME = "test-bookshelf" + + +class TestPrivateEndpoints(DiscoveryMgmtTestCase): + """Tests for Private Endpoint related operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryClient) + self.resource_group = WORKSPACE_RESOURCE_GROUP + self.workspace_name = WORKSPACE_NAME + self.bookshelf_name = BOOKSHELF_NAME + + # ============ Workspace Private Endpoint Connection Tests ============ + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_list_workspace_private_endpoint_connections(self): + """Test listing workspace private endpoint connections.""" + connections = list( + self.client.workspace_private_endpoint_connections.list_by_workspace( + self.resource_group, self.workspace_name + ) + ) + assert isinstance(connections, list) + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_workspace_private_endpoint_connection(self): + """Test getting a workspace private endpoint connection.""" + connection_name = "test-pe-connection" + connection = self.client.workspace_private_endpoint_connections.get( + self.resource_group, self.workspace_name, connection_name + ) + assert connection is not None + assert hasattr(connection, "name") + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_create_workspace_private_endpoint_connection(self): + """Test creating a workspace private endpoint connection.""" + connection_name = "test-pe-connection" + connection_data = { + "properties": { + "privateLinkServiceConnectionState": { + "status": "Approved" + } + } + } + operation = 
self.client.workspace_private_endpoint_connections.begin_create_or_update( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + private_endpoint_connection_name=connection_name, + resource=connection_data, + ) + connection = operation.result() + assert connection is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_delete_workspace_private_endpoint_connection(self): + """Test deleting a workspace private endpoint connection.""" + connection_name = "pe-conn-to-delete" + operation = self.client.workspace_private_endpoint_connections.begin_delete( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + private_endpoint_connection_name=connection_name, + ) + operation.result() + + # ============ Workspace Private Link Resource Tests ============ + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_list_workspace_private_link_resources(self): + """Test listing workspace private link resources.""" + link_resources = list( + self.client.workspace_private_link_resources.list_by_workspace(self.resource_group, self.workspace_name) + ) + assert isinstance(link_resources, list) + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_workspace_private_link_resource(self): + """Test getting a workspace private link resource.""" + link_resource_name = "workspace" + link_resource = self.client.workspace_private_link_resources.get( + self.resource_group, self.workspace_name, link_resource_name + ) + assert link_resource is not None + + # ============ Bookshelf Private Endpoint Connection Tests ============ + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_list_bookshelf_private_endpoint_connections(self): + """Test listing bookshelf private endpoint connections.""" + connections = list( + self.client.bookshelf_private_endpoint_connections.list_by_bookshelf( + self.resource_group, self.bookshelf_name + ) + ) + assert 
isinstance(connections, list) + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_bookshelf_private_endpoint_connection(self): + """Test getting a bookshelf private endpoint connection.""" + connection_name = "test-pe-connection" + connection = self.client.bookshelf_private_endpoint_connections.get( + self.resource_group, self.bookshelf_name, connection_name + ) + assert connection is not None + assert hasattr(connection, "name") + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_create_bookshelf_private_endpoint_connection(self): + """Test creating a bookshelf private endpoint connection.""" + connection_name = "test-pe-connection" + connection_data = { + "properties": { + "privateLinkServiceConnectionState": { + "status": "Approved" + } + } + } + operation = self.client.bookshelf_private_endpoint_connections.begin_create_or_update( + resource_group_name=self.resource_group, + bookshelf_name=self.bookshelf_name, + private_endpoint_connection_name=connection_name, + resource=connection_data, + ) + connection = operation.result() + assert connection is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_delete_bookshelf_private_endpoint_connection(self): + """Test deleting a bookshelf private endpoint connection.""" + connection_name = "pe-conn-to-delete" + operation = self.client.bookshelf_private_endpoint_connections.begin_delete( + resource_group_name=self.resource_group, + bookshelf_name=self.bookshelf_name, + private_endpoint_connection_name=connection_name, + ) + operation.result() + + # ============ Bookshelf Private Link Resource Tests ============ + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_list_bookshelf_private_link_resources(self): + """Test listing bookshelf private link resources.""" + link_resources = list( + self.client.bookshelf_private_link_resources.list_by_bookshelf(self.resource_group, self.bookshelf_name) + ) + assert 
isinstance(link_resources, list) + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_bookshelf_private_link_resource(self): + """Test getting a bookshelf private link resource.""" + link_resource_name = "bookshelf" + link_resource = self.client.bookshelf_private_link_resources.get( + self.resource_group, self.bookshelf_name, link_resource_name + ) + assert link_resource is not None diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py b/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py new file mode 100644 index 000000000000..2aa018be84bc --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py @@ -0,0 +1,78 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Tests for Projects operations.""" +import pytest +from azure.mgmt.discovery import DiscoveryClient +from devtools_testutils import recorded_by_proxy + +from .testcase import DiscoveryMgmtTestCase + + +# Resource group that has a workspace +WORKSPACE_RESOURCE_GROUP = "olawal" +WORKSPACE_NAME = "wrksptest44" + + +class TestProjects(DiscoveryMgmtTestCase): + """Tests for Projects operations.""" + + def setup_method(self, method): + self.client = self.create_discovery_client(DiscoveryClient) + self.resource_group = WORKSPACE_RESOURCE_GROUP + self.workspace_name = WORKSPACE_NAME + + @recorded_by_proxy + def test_list_projects_by_workspace(self): + """Test listing projects in a workspace.""" + projects = list(self.client.projects.list_by_workspace("newapiversiontest", self.workspace_name)) + assert isinstance(projects, list) + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_get_project(self): + """Test getting a specific project by name.""" + # TODO: Replace with actual project name from test environment + project = self.client.projects.get(self.resource_group, self.workspace_name, "test-project") + assert 
project is not None + assert hasattr(project, "name") + assert hasattr(project, "location") + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_create_project(self): + """Test creating a project.""" + unique_name = "test-proj-placeholder" + project_data = {"location": "uksouth"} + operation = self.client.projects.begin_create_or_update( + resource_group_name="olawal", + workspace_name=self.workspace_name, + project_name=unique_name, + resource=project_data, + ) + project = operation.result() + assert project is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_update_project(self): + """Test updating a project.""" + project_data = { + "tags": {"SkipAutoDeleteTill": "2026-12-31"}, + } + operation = self.client.projects.begin_create_or_update( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + project_name="test-project", + resource=project_data, + ) + updated_project = operation.result() + assert updated_project is not None + @pytest.mark.skip(reason="no recording") + @recorded_by_proxy + def test_delete_project(self): + """Test deleting a project.""" + operation = self.client.projects.begin_delete( + resource_group_name=self.resource_group, + workspace_name=self.workspace_name, + project_name="project-to-delete", + ) + operation.result() diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py new file mode 100644 index 000000000000..5dfe21151402 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py @@ -0,0 +1,78 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
class TestStorageAssets(DiscoveryMgmtTestCase):
    """Tests for Storage Assets operations (list/get/create/update/delete).

    All tests target the pre-provisioned storage container named by
    ``STORAGE_ASSET_CONTAINER_NAME`` in resource group
    ``STORAGE_ASSET_RESOURCE_GROUP``.
    """

    # Known storage asset provisioned in the test environment; consolidated
    # here so create/get/update/delete all exercise the same resource.
    STORAGE_ASSET_NAME = "test-sa-482ad005"

    def setup_method(self, method):
        # A fresh client per test; recordings are matched by the proxy decorator.
        self.client = self.create_discovery_client(DiscoveryClient)
        self.resource_group = STORAGE_ASSET_RESOURCE_GROUP

    @recorded_by_proxy
    def test_list_storage_assets_by_storage_container(self):
        """Listing storage assets in a storage container returns a list."""
        assets = list(
            self.client.storage_assets.list_by_storage_container(
                self.resource_group, STORAGE_ASSET_CONTAINER_NAME
            )
        )
        assert isinstance(assets, list)

    @recorded_by_proxy
    def test_get_storage_asset(self):
        """Getting a specific storage asset by name returns the resource."""
        asset = self.client.storage_assets.get(
            self.resource_group, STORAGE_ASSET_CONTAINER_NAME, self.STORAGE_ASSET_NAME
        )
        assert asset is not None
        assert hasattr(asset, "name")

    @recorded_by_proxy
    def test_create_storage_asset(self):
        """Creating a storage asset via the LRO poller succeeds."""
        asset_data = {
            "location": "uksouth",
            "properties": {
                "description": "Test storage asset for SDK validation",
                "path": "data/test-assets",
            },
        }
        operation = self.client.storage_assets.begin_create_or_update(
            resource_group_name=self.resource_group,
            storage_container_name=STORAGE_ASSET_CONTAINER_NAME,
            storage_asset_name=self.STORAGE_ASSET_NAME,
            resource=asset_data,
        )
        asset = operation.result()
        assert asset is not None

    @recorded_by_proxy
    def test_update_storage_asset(self):
        """Patching tags on a storage asset succeeds."""
        asset_data = {
            "tags": {"SkipAutoDeleteTill": "2026-12-31"},
        }
        operation = self.client.storage_assets.begin_update(
            resource_group_name=self.resource_group,
            storage_container_name=STORAGE_ASSET_CONTAINER_NAME,
            storage_asset_name=self.STORAGE_ASSET_NAME,
            properties=asset_data,
        )
        updated_asset = operation.result()
        assert updated_asset is not None

    @recorded_by_proxy
    def test_delete_storage_asset(self):
        """Deleting a storage asset completes without error."""
        operation = self.client.storage_assets.begin_delete(
            resource_group_name=self.resource_group,
            storage_container_name=STORAGE_ASSET_CONTAINER_NAME,
            storage_asset_name=self.STORAGE_ASSET_NAME,
        )
        operation.result()
class TestStorageContainers(DiscoveryMgmtTestCase):
    """Tests for StorageContainers operations (list/get/create/update/delete)."""

    # Known storage container provisioned in the test environment; a single
    # constant replaces the literal previously repeated in every test.
    CONTAINER_NAME = "test-sc-8bef0d1a"

    def setup_method(self, method):
        # A fresh client per test; recordings are matched by the proxy decorator.
        self.client = self.create_discovery_client(DiscoveryClient)
        self.resource_group = STORAGE_CONTAINER_RESOURCE_GROUP

    @recorded_by_proxy
    def test_list_storage_containers_by_resource_group(self):
        """Listing storage containers in a resource group returns a list."""
        containers = list(self.client.storage_containers.list_by_resource_group(self.resource_group))
        assert isinstance(containers, list)

    @recorded_by_proxy
    def test_list_storage_containers_by_subscription(self):
        """Listing storage containers in the subscription returns a list."""
        containers = list(self.client.storage_containers.list_by_subscription())
        assert isinstance(containers, list)

    @recorded_by_proxy
    def test_get_storage_container(self):
        """Getting a specific storage container by name returns the resource."""
        container = self.client.storage_containers.get(self.resource_group, self.CONTAINER_NAME)
        assert container is not None
        assert hasattr(container, "name")

    @recorded_by_proxy
    def test_create_storage_container(self):
        """Creating a blob-backed storage container via the LRO poller succeeds."""
        container_data = {
            "location": "uksouth",
            "properties": {
                "storageStore": {
                    "kind": "AzureStorageBlob",
                    "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr",
                }
            },
        }
        operation = self.client.storage_containers.begin_create_or_update(
            resource_group_name=self.resource_group,
            storage_container_name=self.CONTAINER_NAME,
            resource=container_data,
        )
        container = operation.result()
        assert container is not None

    @recorded_by_proxy
    def test_update_storage_container(self):
        """Patching tags on a storage container succeeds."""
        container_data = {
            "tags": {"SkipAutoDeleteTill": "2026-12-31"},
        }
        operation = self.client.storage_containers.begin_update(
            resource_group_name=self.resource_group,
            storage_container_name=self.CONTAINER_NAME,
            properties=container_data,
        )
        updated_container = operation.result()
        assert updated_container is not None

    @recorded_by_proxy
    def test_delete_storage_container(self):
        """Deleting a storage container completes without error."""
        operation = self.client.storage_containers.begin_delete(
            resource_group_name=self.resource_group,
            storage_container_name=self.CONTAINER_NAME,
        )
        operation.result()
class TestSupercomputers(DiscoveryMgmtTestCase):
    """Tests for Supercomputers operations (list/get/create/update/delete)."""

    # Known supercomputer provisioned in the test environment; a single
    # constant replaces the literal previously repeated in every test.
    SUPERCOMPUTER_NAME = "test-sc-2bbb25b8"

    def setup_method(self, method):
        # A fresh client per test; recordings are matched by the proxy decorator.
        self.client = self.create_discovery_client(DiscoveryClient)
        self.resource_group = SUPERCOMPUTER_RESOURCE_GROUP

    @recorded_by_proxy
    def test_list_supercomputers_by_resource_group(self):
        """Listing supercomputers in a resource group returns a list."""
        supercomputers = list(self.client.supercomputers.list_by_resource_group(self.resource_group))
        assert isinstance(supercomputers, list)

    @recorded_by_proxy
    def test_list_supercomputers_by_subscription(self):
        """Listing supercomputers in the subscription returns a list."""
        supercomputers = list(self.client.supercomputers.list_by_subscription())
        assert isinstance(supercomputers, list)

    @recorded_by_proxy
    def test_get_supercomputer(self):
        """Getting a specific supercomputer by name returns the resource."""
        supercomputer = self.client.supercomputers.get(self.resource_group, self.SUPERCOMPUTER_NAME)
        assert supercomputer is not None
        assert hasattr(supercomputer, "name")
        assert hasattr(supercomputer, "location")

    @recorded_by_proxy
    def test_create_supercomputer(self):
        """Creating a supercomputer with cluster/kubelet/workload identities succeeds."""
        mi_id = "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity"
        supercomputer_data = {
            "location": "uksouth",
            "properties": {
                "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default",
                "identities": {
                    "clusterIdentity": {"id": mi_id},
                    "kubeletIdentity": {"id": mi_id},
                    # workloadIdentities is keyed by the managed identity resource id.
                    "workloadIdentities": {mi_id: {}},
                },
            },
        }
        operation = self.client.supercomputers.begin_create_or_update(
            resource_group_name=self.resource_group,
            supercomputer_name=self.SUPERCOMPUTER_NAME,
            resource=supercomputer_data,
        )
        supercomputer = operation.result()
        assert supercomputer is not None

    @pytest.mark.skip(reason="server returns 400 on supercomputer PATCH - service-side bug")
    @recorded_by_proxy
    def test_update_supercomputer(self):
        """Patching tags on a supercomputer succeeds (skipped: service-side bug)."""
        supercomputer_data = {
            "tags": {"SkipAutoDeleteTill": "2026-12-31"},
        }
        operation = self.client.supercomputers.begin_update(
            resource_group_name=self.resource_group,
            supercomputer_name=self.SUPERCOMPUTER_NAME,
            properties=supercomputer_data,
        )
        updated_supercomputer = operation.result()
        assert updated_supercomputer is not None

    @recorded_by_proxy
    def test_delete_supercomputer(self):
        """Deleting a supercomputer completes without error."""
        operation = self.client.supercomputers.begin_delete(
            resource_group_name=self.resource_group,
            supercomputer_name=self.SUPERCOMPUTER_NAME,
        )
        operation.result()
class TestTools(DiscoveryMgmtTestCase):
    """Tests for Tools operations (list/get/create/update/delete).

    Uses ``AZURE_RESOURCE_GROUP`` (environment-overridable) and the shared
    ``TOOL_NAME`` constant for every operation, so the same tool resource is
    exercised end-to-end.
    """

    def setup_method(self, method):
        # A fresh client per test; recordings are matched by the proxy decorator.
        self.client = self.create_discovery_client(DiscoveryClient)
        self.resource_group = AZURE_RESOURCE_GROUP

    @recorded_by_proxy
    def test_list_tools_by_subscription(self):
        """Listing tools in the subscription returns a list."""
        tools = list(self.client.tools.list_by_subscription())
        assert isinstance(tools, list)

    @recorded_by_proxy
    def test_list_tools_by_resource_group(self):
        """Listing tools in a resource group returns a list."""
        tools = list(self.client.tools.list_by_resource_group(self.resource_group))
        assert isinstance(tools, list)

    @recorded_by_proxy
    def test_get_tool(self):
        """Getting a specific tool by name returns the resource."""
        tool = self.client.tools.get(self.resource_group, TOOL_NAME)
        assert tool is not None
        # Don't assert on name since it may be sanitized in playback.
        assert hasattr(tool, "name")
        assert hasattr(tool, "location")

    @recorded_by_proxy
    def test_create_tool(self):
        """Creating a tool with a full container/action definition succeeds."""
        tool_data = {
            "location": "uksouth",
            "properties": {
                "version": "1.0.0",
                "definitionContent": {
                    "name": "molpredictor",
                    "description": "Molecular property prediction for single SMILES strings.",
                    "version": "1.0.0",
                    "category": "cheminformatics",
                    "license": "MIT",
                    "infra": [
                        {
                            "name": "worker",
                            "infra_type": "container",
                            "image": {
                                "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest"
                            },
                            "compute": {
                                "min_resources": {
                                    "cpu": "1",
                                    "ram": "1Gi",
                                    "storage": "32",
                                    "gpu": "0",
                                },
                                "max_resources": {
                                    "cpu": "2",
                                    "ram": "1Gi",
                                    "storage": "64",
                                    "gpu": "0",
                                },
                                "recommended_sku": ["Standard_D4s_v6"],
                                "pool_type": "static",
                                "pool_size": 1,
                            },
                        }
                    ],
                    "actions": [
                        {
                            "name": "predict",
                            "description": "Predict molecular properties for SMILES strings.",
                            "input_schema": {
                                "type": "object",
                                "properties": {
                                    "action": {
                                        "type": "string",
                                        "description": "The property to predict. Must be one of [log_p, boiling_point, solubility, density, critical_point]",
                                    }
                                },
                                "required": ["action"],
                            },
                            "command": "python molpredictor.py --action {{ action }}",
                            "infra_node": "worker",
                        }
                    ],
                },
            },
        }
        operation = self.client.tools.begin_create_or_update(
            resource_group_name=self.resource_group,
            tool_name=TOOL_NAME,
            resource=tool_data,
        )
        tool = operation.result()
        assert tool is not None

    @recorded_by_proxy
    def test_update_tool(self):
        """Patching tags on a tool succeeds."""
        tool_data = {
            "tags": {"SkipAutoDeleteTill": "2026-12-31"},
        }
        operation = self.client.tools.begin_update(
            resource_group_name=self.resource_group,
            tool_name=TOOL_NAME,
            properties=tool_data,
        )
        updated_tool = operation.result()
        assert updated_tool is not None

    @recorded_by_proxy
    def test_delete_tool(self):
        """Deleting a tool completes without error."""
        operation = self.client.tools.begin_delete(
            resource_group_name=self.resource_group,
            tool_name=TOOL_NAME,
        )
        operation.result()
class TestDiscoveryClientUnit:
    """Unit tests for Discovery management client initialization.

    These tests verify client configuration without making HTTP calls;
    the credential is never exercised because no request is sent.
    """

    @staticmethod
    def _make_client():
        """Build a client with a placeholder subscription; no requests are sent."""
        # Imported lazily to match the original tests' local-import style.
        from azure.identity import DefaultAzureCredential

        return DiscoveryClient(
            credential=DefaultAzureCredential(),
            subscription_id="00000000-0000-0000-0000-000000000000",
        )

    def test_client_has_expected_operations(self):
        """Test that client exposes expected operation groups."""
        client = self._make_client()

        # Every generated operation group must be reachable as an attribute.
        expected_groups = (
            "operations",
            "workspaces",
            "workspace_private_endpoint_connections",
            "workspace_private_link_resources",
            "bookshelves",
            "bookshelf_private_endpoint_connections",
            "bookshelf_private_link_resources",
            "projects",
            "storage_assets",
            "storage_containers",
            "tools",
            "supercomputers",
            "node_pools",
            "chat_model_deployments",
        )
        for group in expected_groups:
            assert hasattr(client, group), f"missing operation group: {group}"

    def test_client_api_version(self):
        """Test that client uses correct API version."""
        client = self._make_client()

        # The generated client pins the preview API version in its config.
        assert client._config.api_version == "2026-02-01-preview"
class TestDiscoveryModelsUnit:
    """Unit tests for Discovery management SDK models.

    Each test constructs one model purely in memory (no HTTP calls) and
    checks that the constructor arguments round-trip to attributes.
    """

    def test_workspace_model_initialization(self):
        """A Workspace accepts a location and a properties bag."""
        ws = models.Workspace(location="eastus", properties=models.WorkspaceProperties())
        assert ws.location == "eastus"
        assert ws.properties is not None

    def test_identity_model(self):
        """An Identity keeps the resource id it was constructed with."""
        mi_resource_id = "/subscriptions/sub/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/id"
        mi = models.Identity(id=mi_resource_id)
        assert mi.id == mi_resource_id

    def test_bookshelf_model_initialization(self):
        """A Bookshelf accepts a location and a properties bag."""
        shelf = models.Bookshelf(location="eastus", properties=models.BookshelfProperties())
        assert shelf.location == "eastus"
        assert shelf.properties is not None

    def test_project_model_initialization(self):
        """A Project accepts a location and a properties bag."""
        proj = models.Project(location="eastus", properties=models.ProjectProperties())
        assert proj.location == "eastus"
        assert proj.properties is not None

    def test_storage_asset_model_initialization(self):
        """A StorageAsset accepts a location and a properties bag."""
        asset = models.StorageAsset(location="eastus", properties=models.StorageAssetProperties())
        assert asset.location == "eastus"
        assert asset.properties is not None

    def test_storage_container_model_initialization(self):
        """A StorageContainer accepts a properties bag (no location required)."""
        sc = models.StorageContainer(properties=models.StorageContainerProperties())
        assert sc.properties is not None

    def test_tool_model_initialization(self):
        """A Tool accepts a location and a properties bag."""
        tool = models.Tool(location="eastus", properties=models.ToolProperties())
        assert tool.location == "eastus"
        assert tool.properties is not None

    def test_supercomputer_model_initialization(self):
        """A Supercomputer accepts a location and a properties bag."""
        sc = models.Supercomputer(location="eastus", properties=models.SupercomputerProperties())
        assert sc.location == "eastus"
        assert sc.properties is not None

    def test_node_pool_model_initialization(self):
        """A NodePool accepts a properties bag (no location required)."""
        pool = models.NodePool(properties=models.NodePoolProperties())
        assert pool.properties is not None
class TestWorkspaces(DiscoveryMgmtTestCase):
    """Tests for Workspaces operations (list/get/create/update/delete).

    ``WORKSPACE_NAME`` names the pre-provisioned workspace exercised by the
    get/create/update tests; delete targets a separate disposable workspace.
    """

    def setup_method(self, method):
        # A fresh client per test; recordings are matched by the proxy decorator.
        self.client = self.create_discovery_client(DiscoveryClient)
        self.resource_group = WORKSPACE_RESOURCE_GROUP

    @recorded_by_proxy
    def test_list_workspaces_by_subscription(self):
        """Listing workspaces in the subscription returns at least one."""
        workspaces = list(self.client.workspaces.list_by_subscription())
        assert isinstance(workspaces, list)
        assert len(workspaces) >= 1

    @recorded_by_proxy
    def test_list_workspaces_by_resource_group(self):
        """Listing workspaces in a resource group returns at least one."""
        workspaces = list(self.client.workspaces.list_by_resource_group(self.resource_group))
        assert isinstance(workspaces, list)
        assert len(workspaces) >= 1

    @recorded_by_proxy
    def test_get_workspace(self):
        """Getting a specific workspace by name returns the resource."""
        workspace = self.client.workspaces.get(self.resource_group, WORKSPACE_NAME)
        assert workspace is not None
        # Don't assert on name since it may be sanitized in playback.
        assert hasattr(workspace, "name")
        assert hasattr(workspace, "location")

    @recorded_by_proxy
    def test_create_workspace(self):
        """Creating a CMK-enabled, private workspace via the LRO poller succeeds."""
        workspace_data = {
            "location": "uksouth",
            "properties": {
                "supercomputerIds": [],
                "workspaceIdentity": {
                    "id": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity"
                },
                "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default3",
                "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default",
                "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default2",
                "customerManagedKeys": "Enabled",
                "keyVaultProperties": {
                    "keyName": "discoverykey",
                    "keyVaultUri": "https://newapik.vault.azure.net/",
                    "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906",
                },
                "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse",
                "publicNetworkAccess": "Disabled",
            },
        }
        operation = self.client.workspaces.begin_create_or_update(
            resource_group_name=self.resource_group,
            workspace_name=WORKSPACE_NAME,
            resource=workspace_data,
        )
        workspace = operation.result()
        assert workspace is not None

    @recorded_by_proxy
    def test_update_workspace(self):
        """Test updating a workspace by changing the key vault key version."""
        # PATCH the workspace with the new key version
        update_data = {
            "properties": {
                "keyVaultProperties": {
                    "keyName": "discoverykey",
                    "keyVersion": "956de2fc802f49eba81ddcc348ebc27c",
                },
            },
        }
        operation = self.client.workspaces.begin_update(
            resource_group_name=self.resource_group,
            workspace_name=WORKSPACE_NAME,
            properties=update_data,
        )
        updated_workspace = operation.result()
        assert updated_workspace is not None

    @recorded_by_proxy
    def test_delete_workspace(self):
        """Deleting a disposable workspace completes without error."""
        # Deliberately targets a different workspace than WORKSPACE_NAME so the
        # shared fixture workspace survives the other tests.
        operation = self.client.workspaces.begin_delete(
            resource_group_name=self.resource_group,
            workspace_name="test-wrksp-397d51cf",
        )
        operation.result()
class DiscoveryMgmtTestCase(AzureMgmtRecordedTestCase):
    """Base test class for Discovery management SDK tests.

    Configures the client to use the EUAP endpoint for the feature-flagged API.
    """

    def create_discovery_client(self, client_class):
        """Create a Discovery client configured for the EUAP endpoint.

        :param client_class: The client class to instantiate (e.g. DiscoveryClient).
        :return: A client instance bound to the test subscription and EUAP ARM endpoint.
        """
        # AZURE_SUBSCRIPTION_ID already reflects any environment override
        # (resolved once at module import), so no second os.environ lookup
        # is needed here.
        credential = self.get_credential(client_class)
        return client_class(
            credential=credential,
            subscription_id=AZURE_SUBSCRIPTION_ID,
            base_url=AZURE_ARM_ENDPOINT,
        )
Run a Tool on Supercomputer (Workspace client) - THE HERO! -5. Check Run Status and wait for completion (Workspace client) -6. Query results from KnowledgeBase (Bookshelf client) - -This scenario requires real Azure resources and is intended to be run -in record mode to generate recordings for CI playback. - -HERO SCENARIO: "Run a Tool on Supercomputer" -This is the primary use case for the Discovery service - executing -scientific computing tools on Azure supercomputers. -""" -import os -import uuid -import pytest -from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy - - -# Test configuration -AZURE_LOCATION = os.environ.get("AZURE_LOCATION", "eastus") -AZURE_RESOURCE_GROUP = os.environ.get("AZURE_RESOURCE_GROUP", "olawal") -SUPERCOMPUTER_NAME = os.environ.get("SUPERCOMPUTER_NAME", "test-supercomputer") - - -class TestHeroScenario(AzureMgmtRecordedTestCase): - """ - Hero Scenario: Run a Tool on Supercomputer - - This test class validates the complete end-to-end Discovery workflow - using all three SDK clients: - - ARM Client (azure-mgmt-discovery): - - Create/manage Workspace - - Create/manage Project - - Access Supercomputer - - Workspace Client (azure-discovery-workspace): - - Create Investigation - - Run Tool on Supercomputer - - Monitor Run Status - - Manage Tasks - - Bookshelf Client (azure-discovery-bookshelf): - - Query KnowledgeBase for insights - - Search results - """ - - def setup_method(self, method): - """Set up test resources.""" - self.test_run_id = str(uuid.uuid4())[:8] - self.workspace_name = f"test-workspace-{self.test_run_id}" - self.project_name = f"test-project-{self.test_run_id}" - self.investigation_name = f"test-investigation-{self.test_run_id}" - - def create_mgmt_client(self): - """Create the ARM management client.""" - from azure.mgmt.discovery import DiscoveryMgmtClient - - return self.create_mgmt_client(DiscoveryMgmtClient) - - # ========================================================================= - # UNIT 
TESTS - Validate API Surface - # ========================================================================= - - def test_arm_client_has_workspace_operations(self): - """Validate ARM client exposes workspace operations.""" - from azure.mgmt.discovery import DiscoveryClient - - # Just verify the class structure - no actual API calls - assert hasattr(DiscoveryClient, "__init__") - # The client should have workspaces property when instantiated - - def test_arm_client_has_project_operations(self): - """Validate ARM client exposes project operations.""" - from azure.mgmt.discovery import DiscoveryClient - - assert hasattr(DiscoveryClient, "__init__") - - def test_arm_client_has_supercomputer_operations(self): - """Validate ARM client exposes supercomputer operations.""" - from azure.mgmt.discovery import DiscoveryClient - - assert hasattr(DiscoveryClient, "__init__") - - def test_workspace_client_has_investigation_operations(self): - """Validate Workspace client exposes investigation operations.""" - from azure.ai.discovery import WorkspaceClient - - assert hasattr(WorkspaceClient, "__init__") - - def test_workspace_client_has_tools_operations(self): - """Validate Workspace client exposes tools operations for running on supercomputer.""" - from azure.ai.discovery import WorkspaceClient - - assert hasattr(WorkspaceClient, "__init__") - - def test_bookshelf_client_has_knowledge_base_operations(self): - """Validate Bookshelf client exposes knowledge base operations.""" - from azure.ai.discovery import BookshelfClient - - assert hasattr(BookshelfClient, "__init__") - - # ========================================================================= - # HERO SCENARIO FLOW DOCUMENTATION - # ========================================================================= - - def test_hero_scenario_flow_documentation(self): - """ - Document the complete hero scenario flow. - - This test serves as executable documentation of the 10-step - hero scenario for running a tool on a supercomputer. 
- """ - hero_scenario_steps = [ - { - "step": 1, - "name": "Create Workspace", - "client": "ARM (DiscoveryMgmtClient)", - "operation": "workspaces.begin_create_or_update", - "description": "Create an Azure Discovery Workspace to organize resources", - }, - { - "step": 2, - "name": "Create Project", - "client": "ARM (DiscoveryMgmtClient)", - "operation": "projects.begin_create_or_update", - "description": "Create a Project within the Workspace for logical grouping", - }, - { - "step": 3, - "name": "Get Supercomputer", - "client": "ARM (DiscoveryMgmtClient)", - "operation": "supercomputers.get", - "description": "Get reference to an existing Supercomputer for compute", - }, - { - "step": 4, - "name": "Get Node Pool", - "client": "ARM (DiscoveryMgmtClient)", - "operation": "node_pools.list_by_supercomputer", - "description": "Get available node pools for running tools", - }, - { - "step": 5, - "name": "Get Tool Definition", - "client": "ARM (DiscoveryMgmtClient)", - "operation": "tools.get", - "description": "Get the tool to run (e.g., molecular dynamics simulation)", - }, - { - "step": 6, - "name": "Create Investigation", - "client": "Workspace (WorkspaceClient)", - "operation": "investigations.create_or_update", - "description": "Create an Investigation to track the scientific workflow", - }, - { - "step": 7, - "name": "Run Tool on Supercomputer", - "client": "Workspace (WorkspaceClient)", - "operation": "tools.begin_run", - "description": "THE HERO - Execute the tool on supercomputer nodes", - }, - { - "step": 8, - "name": "Monitor Run Status", - "client": "Workspace (WorkspaceClient)", - "operation": "tools.get_run_status", - "description": "Poll for completion of the tool run", - }, - { - "step": 9, - "name": "Create Task for Results", - "client": "Workspace (WorkspaceClient)", - "operation": "tasks.create", - "description": "Create a task to process and analyze results", - }, - { - "step": 10, - "name": "Query Knowledge Base", - "client": "Bookshelf 
(BookshelfClient)", - "operation": "knowledge_base_versions.search", - "description": "Search knowledge base for insights from the run", - }, - ] - - # Validate all steps are documented - assert len(hero_scenario_steps) == 10, "Hero scenario has 10 steps" - - # Validate step structure - for step in hero_scenario_steps: - assert "step" in step, "step number" - assert "name" in step, "step name" - assert "client" in step, "client name" - assert "operation" in step, "operation name" - assert "description" in step, "description" - - # Print the flow for documentation - print("\n=== HERO SCENARIO: Run Tool on Supercomputer ===\n") - for step in hero_scenario_steps: - print(f"Step {step['step']}: {step['name']}") - print(f" Client: {step['client']}") - print(f" Operation: {step['operation']}") - print(f" {step['description']}\n") - - # ========================================================================= - # RECORDED INTEGRATION TESTS - # ========================================================================= - @pytest.mark.skip(reason="no recording") - @recorded_by_proxy - def test_step1_create_workspace(self): - """Step 1: Create a Workspace via ARM.""" - from azure.mgmt.discovery import DiscoveryMgmtClient - - client = self.create_mgmt_client(DiscoveryMgmtClient) - - # Create workspace - poller = client.workspaces.begin_create_or_update( - resource_group_name=AZURE_RESOURCE_GROUP, - workspace_name=self.workspace_name, - resource={"location": AZURE_LOCATION, "properties": {}}, - ) - workspace = poller.result() - - assert workspace is not None - assert workspace.name == self.workspace_name - assert workspace.location == AZURE_LOCATION - @pytest.mark.skip(reason="no recording") - @recorded_by_proxy - def test_step2_create_project(self): - """Step 2: Create a Project in the Workspace via ARM.""" - from azure.mgmt.discovery import DiscoveryMgmtClient - - client = self.create_mgmt_client(DiscoveryMgmtClient) - - # Create project - poller = 
client.projects.begin_create_or_update( - resource_group_name=AZURE_RESOURCE_GROUP, - workspace_name=self.workspace_name, - project_name=self.project_name, - resource={"location": AZURE_LOCATION, "properties": {}}, - ) - project = poller.result() - - assert project is not None - assert project.name == self.project_name - @pytest.mark.skip(reason="no recording") - @recorded_by_proxy - def test_step3_verify_supercomputer(self): - """Step 3: Verify Supercomputer exists.""" - from azure.mgmt.discovery import DiscoveryMgmtClient - - client = self.create_mgmt_client(DiscoveryMgmtClient) - - # Try to get supercomputer - try: - supercomputer = client.supercomputers.get( - resource_group_name=AZURE_RESOURCE_GROUP, - supercomputer_name=SUPERCOMPUTER_NAME, - ) - assert supercomputer is not None - assert supercomputer.name == SUPERCOMPUTER_NAME - except Exception: - # If not found, list available supercomputers - supercomputers = list(client.supercomputers.list_by_subscription()) - print(f"Available supercomputers: {[s.name for s in supercomputers]}") - @pytest.mark.skip(reason="no recording") - @recorded_by_proxy - def test_step6_create_investigation(self): - """Step 6: Create an Investigation via Workspace client.""" - from azure.ai.discovery import WorkspaceClient - - workspace_endpoint = os.environ.get( - "AZURE_DISCOVERY_WORKSPACE_ENDPOINT", "https://test.workspace.discovery.azure.com" - ) - client = WorkspaceClient(endpoint=workspace_endpoint, credential=self.get_credential(WorkspaceClient)) - - # Create investigation - investigation = client.investigations.create_or_update( - investigation_id=self.investigation_name, - body={"name": self.investigation_name, "description": "Hero scenario test investigation"}, - ) - - assert investigation is not None - @pytest.mark.skip(reason="no recording") - @recorded_by_proxy - def test_step7_run_tool_on_supercomputer(self): - """ - Step 7: THE HERO - Run Tool on Supercomputer - - This is the core hero scenario - executing a 
scientific tool - on Azure supercomputer infrastructure. - - Prerequisites: - 1. Valid tool_id configured in the workspace - 2. Available node_pool_ids for compute allocation - 3. Tool runs consume compute resources (billable) - """ - from azure.ai.discovery import WorkspaceClient - - workspace_endpoint = os.environ.get( - "AZURE_DISCOVERY_WORKSPACE_ENDPOINT", "https://test.workspace.discovery.azure.com" - ) - client = WorkspaceClient(endpoint=workspace_endpoint, credential=self.get_credential(WorkspaceClient)) - - # Run tool on supercomputer - tool_id = os.environ.get("TOOL_ID", "test-tool") - node_pool_id = os.environ.get("NODE_POOL_ID", "test-node-pool") - - poller = client.tools.begin_run( - body={ - "toolId": tool_id, - "nodePoolIds": [node_pool_id], - "parameters": {"input_file": "/data/input.dat", "output_dir": "/data/output"}, - } - ) - run_result = poller.result() - - assert run_result is not None - @pytest.mark.skip(reason="no recording") - @recorded_by_proxy - def test_step8_monitor_run_status(self): - """Step 8: Monitor Tool Run Status.""" - from azure.ai.discovery import WorkspaceClient - - workspace_endpoint = os.environ.get( - "AZURE_DISCOVERY_WORKSPACE_ENDPOINT", "https://test.workspace.discovery.azure.com" - ) - client = WorkspaceClient(endpoint=workspace_endpoint, credential=self.get_credential(WorkspaceClient)) - - run_id = os.environ.get("RUN_ID", "test-run-id") - status = client.tools.get_run_status(run_id=run_id) - - assert status is not None - @pytest.mark.skip(reason="no recording") - @recorded_by_proxy - def test_step10_query_knowledge_base(self): - """Step 10: Query Knowledge Base for insights.""" - from azure.ai.discovery import BookshelfClient - - bookshelf_endpoint = os.environ.get( - "AZURE_DISCOVERY_BOOKSHELF_ENDPOINT", "https://test.bookshelf.discovery.azure.com" - ) - client = BookshelfClient(endpoint=bookshelf_endpoint, credential=self.get_credential(BookshelfClient)) - - knowledge_base_name = os.environ.get("KNOWLEDGE_BASE_NAME", 
"test-kb") - version = os.environ.get("KNOWLEDGE_BASE_VERSION", "1") - - # Search the knowledge base - results = client.knowledge_base_versions.search( - name=knowledge_base_name, version=version, body={"query": "simulation results", "top": 10} - ) - - assert results is not None From bbfc8e62d3daec089acf76d92129e6bd2cd99d0c Mon Sep 17 00:00:00 2001 From: ChenxiJiang333 <119990644+ChenxiJiang333@users.noreply.github.com> Date: Mon, 9 Mar 2026 14:07:25 +0800 Subject: [PATCH 5/9] Rename title from 'DiscoveryClient' to 'DiscoveryMgmtClient' --- sdk/discovery/azure-mgmt-discovery/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/discovery/azure-mgmt-discovery/pyproject.toml b/sdk/discovery/azure-mgmt-discovery/pyproject.toml index bddd92973a29..48002281ebf8 100644 --- a/sdk/discovery/azure-mgmt-discovery/pyproject.toml +++ b/sdk/discovery/azure-mgmt-discovery/pyproject.toml @@ -83,4 +83,4 @@ need_msrestazure = false need_azuremgmtcore = true sample_link = "" exclude_folders = "" -title = "DiscoveryClient" +title = "DiscoveryMgmtClient" From 2d191b89dff13e442f2b769f3ef99e947b5d820e Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Mon, 9 Mar 2026 06:28:26 +0000 Subject: [PATCH 6/9] Configurations: 'specification/discovery/Discovery.Management/tspconfig.yaml', API Version: 2026-02-01-preview, SDK Release Type: beta, and CommitSHA: '402bf21e472d87c1b3abcd0b2675a8b6c8a42700' in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs' Pipeline run: https://dev.azure.com/azure-sdk/internal/_build/results?buildId=5979237 Refer to https://eng.ms/docs/products/azure-developer-experience/develop/sdk-release/sdk-release-prerequisites to prepare for SDK release. 
--- .../azure-mgmt-discovery/CHANGELOG.md | 2 +- sdk/discovery/azure-mgmt-discovery/README.md | 4 +- .../azure-mgmt-discovery/_metadata.json | 2 +- .../azure/mgmt/discovery/__init__.py | 4 +- .../azure/mgmt/discovery/_client.py | 6 +- .../azure/mgmt/discovery/_configuration.py | 4 +- .../azure/mgmt/discovery/aio/__init__.py | 4 +- .../azure/mgmt/discovery/aio/_client.py | 6 +- .../mgmt/discovery/aio/_configuration.py | 4 +- .../discovery/aio/operations/_operations.py | 58 +++++++++---------- .../mgmt/discovery/operations/_operations.py | 58 +++++++++---------- ...ctions_create_or_update_maximum_set_gen.py | 4 +- ...oint_connections_delete_maximum_set_gen.py | 4 +- ...ndpoint_connections_get_maximum_set_gen.py | 4 +- ...tions_list_by_bookshelf_maximum_set_gen.py | 4 +- ...vate_link_resources_get_maximum_set_gen.py | 4 +- ...urces_list_by_bookshelf_maximum_set_gen.py | 4 +- ...helves_create_or_update_maximum_set_gen.py | 4 +- .../bookshelves_delete_maximum_set_gen.py | 4 +- .../bookshelves_get_maximum_set_gen.py | 4 +- ..._list_by_resource_group_maximum_set_gen.py | 4 +- ...es_list_by_subscription_maximum_set_gen.py | 4 +- .../bookshelves_update_maximum_set_gen.py | 4 +- ...yments_create_or_update_maximum_set_gen.py | 4 +- ...odel_deployments_delete_maximum_set_gen.py | 4 +- ...t_model_deployments_get_maximum_set_gen.py | 4 +- ...ments_list_by_workspace_maximum_set_gen.py | 4 +- ...odel_deployments_update_maximum_set_gen.py | 4 +- ..._pools_create_or_update_maximum_set_gen.py | 4 +- .../node_pools_delete_maximum_set_gen.py | 4 +- .../node_pools_get_maximum_set_gen.py | 4 +- ...s_list_by_supercomputer_maximum_set_gen.py | 4 +- .../node_pools_update_maximum_set_gen.py | 4 +- .../operations_list_maximum_set_gen.py | 4 +- .../operations_list_minimum_set_gen.py | 4 +- ...ojects_create_or_update_maximum_set_gen.py | 4 +- .../projects_delete_maximum_set_gen.py | 4 +- .../projects_get_maximum_set_gen.py | 4 +- ...jects_list_by_workspace_maximum_set_gen.py | 4 +- 
.../projects_update_maximum_set_gen.py | 4 +- ...assets_create_or_update_maximum_set_gen.py | 4 +- .../storage_assets_delete_maximum_set_gen.py | 4 +- .../storage_assets_get_maximum_set_gen.py | 4 +- ...st_by_storage_container_maximum_set_gen.py | 4 +- .../storage_assets_update_maximum_set_gen.py | 4 +- ...ainers_create_or_update_maximum_set_gen.py | 4 +- ...orage_containers_delete_maximum_set_gen.py | 4 +- .../storage_containers_get_maximum_set_gen.py | 4 +- ..._list_by_resource_group_maximum_set_gen.py | 4 +- ...rs_list_by_subscription_maximum_set_gen.py | 4 +- ...orage_containers_update_maximum_set_gen.py | 4 +- ...puters_create_or_update_maximum_set_gen.py | 4 +- .../supercomputers_delete_maximum_set_gen.py | 4 +- .../supercomputers_get_maximum_set_gen.py | 4 +- ..._list_by_resource_group_maximum_set_gen.py | 4 +- ...rs_list_by_subscription_maximum_set_gen.py | 4 +- .../supercomputers_update_maximum_set_gen.py | 4 +- .../tools_create_or_update_maximum_set_gen.py | 4 +- .../tools_delete_maximum_set_gen.py | 4 +- .../tools_get_maximum_set_gen.py | 4 +- ..._list_by_resource_group_maximum_set_gen.py | 4 +- ...ls_list_by_subscription_maximum_set_gen.py | 4 +- .../tools_update_maximum_set_gen.py | 4 +- ...ctions_create_or_update_maximum_set_gen.py | 4 +- ...oint_connections_delete_maximum_set_gen.py | 4 +- ...ndpoint_connections_get_maximum_set_gen.py | 4 +- ...tions_list_by_workspace_maximum_set_gen.py | 4 +- ...vate_link_resources_get_maximum_set_gen.py | 4 +- ...urces_list_by_workspace_maximum_set_gen.py | 4 +- ...spaces_create_or_update_maximum_set_gen.py | 4 +- .../workspaces_delete_maximum_set_gen.py | 4 +- .../workspaces_get_maximum_set_gen.py | 4 +- ..._list_by_resource_group_maximum_set_gen.py | 4 +- ...es_list_by_subscription_maximum_set_gen.py | 4 +- .../workspaces_update_maximum_set_gen.py | 4 +- .../generated_tests/conftest.py | 16 ++--- ...rivate_endpoint_connections_operations.py} | 6 +- ..._endpoint_connections_operations_async.py} | 6 +- 
...helf_private_link_resources_operations.py} | 6 +- ...rivate_link_resources_operations_async.py} | 6 +- ..._discovery_mgmt_bookshelves_operations.py} | 6 +- ...very_mgmt_bookshelves_operations_async.py} | 6 +- ...mgmt_chat_model_deployments_operations.py} | 6 +- ...hat_model_deployments_operations_async.py} | 6 +- ...t_discovery_mgmt_node_pools_operations.py} | 6 +- ...overy_mgmt_node_pools_operations_async.py} | 6 +- ...s.py => test_discovery_mgmt_operations.py} | 6 +- ...> test_discovery_mgmt_operations_async.py} | 6 +- ...est_discovery_mgmt_projects_operations.py} | 6 +- ...scovery_mgmt_projects_operations_async.py} | 6 +- ...scovery_mgmt_storage_assets_operations.py} | 6 +- ...y_mgmt_storage_assets_operations_async.py} | 6 +- ...ery_mgmt_storage_containers_operations.py} | 6 +- ...mt_storage_containers_operations_async.py} | 6 +- ...scovery_mgmt_supercomputers_operations.py} | 6 +- ...y_mgmt_supercomputers_operations_async.py} | 6 +- ...> test_discovery_mgmt_tools_operations.py} | 6 +- ..._discovery_mgmt_tools_operations_async.py} | 6 +- ...rivate_endpoint_connections_operations.py} | 6 +- ..._endpoint_connections_operations_async.py} | 6 +- ...pace_private_link_resources_operations.py} | 6 +- ...rivate_link_resources_operations_async.py} | 6 +- ...t_discovery_mgmt_workspaces_operations.py} | 6 +- ...overy_mgmt_workspaces_operations_async.py} | 6 +- .../tests/test_bookshelves.py | 4 ++ .../tests/test_chat_model_deployments.py | 15 +++-- .../tests/test_node_pools.py | 9 ++- .../tests/test_private_endpoints.py | 24 ++++---- .../tests/test_projects.py | 4 ++ .../tests/test_storage_assets.py | 13 +++-- .../tests/test_storage_containers.py | 9 ++- .../tests/test_supercomputers.py | 11 +++- .../azure-mgmt-discovery/tests/test_tools.py | 37 +++++------- .../tests/test_workspaces.py | 5 +- .../azure-mgmt-discovery/tsp-location.yaml | 2 +- 115 files changed, 370 insertions(+), 355 deletions(-) rename 
sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_bookshelf_private_endpoint_connections_operations.py => test_discovery_mgmt_bookshelf_private_endpoint_connections_operations.py} (94%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_bookshelf_private_endpoint_connections_operations_async.py => test_discovery_mgmt_bookshelf_private_endpoint_connections_operations_async.py} (94%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_bookshelf_private_link_resources_operations.py => test_discovery_mgmt_bookshelf_private_link_resources_operations.py} (88%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_bookshelf_private_link_resources_operations_async.py => test_discovery_mgmt_bookshelf_private_link_resources_operations_async.py} (87%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_bookshelves_operations.py => test_discovery_mgmt_bookshelves_operations.py} (97%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_bookshelves_operations_async.py => test_discovery_mgmt_bookshelves_operations_async.py} (97%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_chat_model_deployments_operations.py => test_discovery_mgmt_chat_model_deployments_operations.py} (95%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_chat_model_deployments_operations_async.py => test_discovery_mgmt_chat_model_deployments_operations_async.py} (95%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_node_pools_operations.py => test_discovery_mgmt_node_pools_operations.py} (96%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_node_pools_operations_async.py => test_discovery_mgmt_node_pools_operations_async.py} (96%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_operations.py => test_discovery_mgmt_operations.py} (85%) rename 
sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_operations_async.py => test_discovery_mgmt_operations_async.py} (84%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_projects_operations.py => test_discovery_mgmt_projects_operations.py} (96%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_projects_operations_async.py => test_discovery_mgmt_projects_operations_async.py} (95%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_storage_assets_operations.py => test_discovery_mgmt_storage_assets_operations.py} (95%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_storage_assets_operations_async.py => test_discovery_mgmt_storage_assets_operations_async.py} (95%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_storage_containers_operations.py => test_discovery_mgmt_storage_containers_operations.py} (95%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_storage_containers_operations_async.py => test_discovery_mgmt_storage_containers_operations_async.py} (95%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_supercomputers_operations.py => test_discovery_mgmt_supercomputers_operations.py} (96%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_supercomputers_operations_async.py => test_discovery_mgmt_supercomputers_operations_async.py} (96%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_tools_operations.py => test_discovery_mgmt_tools_operations.py} (96%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_tools_operations_async.py => test_discovery_mgmt_tools_operations_async.py} (96%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_workspace_private_endpoint_connections_operations.py => test_discovery_mgmt_workspace_private_endpoint_connections_operations.py} (94%) rename 
sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_workspace_private_endpoint_connections_operations_async.py => test_discovery_mgmt_workspace_private_endpoint_connections_operations_async.py} (94%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_workspace_private_link_resources_operations.py => test_discovery_mgmt_workspace_private_link_resources_operations.py} (88%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_workspace_private_link_resources_operations_async.py => test_discovery_mgmt_workspace_private_link_resources_operations_async.py} (87%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_workspaces_operations.py => test_discovery_mgmt_workspaces_operations.py} (97%) rename sdk/discovery/azure-mgmt-discovery/generated_tests/{test_discovery_workspaces_operations_async.py => test_discovery_mgmt_workspaces_operations_async.py} (97%) diff --git a/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md b/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md index b1ff2b1ec490..e561ba6823b5 100644 --- a/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md +++ b/sdk/discovery/azure-mgmt-discovery/CHANGELOG.md @@ -1,6 +1,6 @@ # Release History -## 1.0.0b1 (2026-03-07) +## 1.0.0b1 (2026-03-09) ### Other Changes diff --git a/sdk/discovery/azure-mgmt-discovery/README.md b/sdk/discovery/azure-mgmt-discovery/README.md index 8e9a0612cb7e..0d5eadb64512 100644 --- a/sdk/discovery/azure-mgmt-discovery/README.md +++ b/sdk/discovery/azure-mgmt-discovery/README.md @@ -36,11 +36,11 @@ With above configuration, client can be authenticated by following code: ```python from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient import os sub_id = os.getenv("AZURE_SUBSCRIPTION_ID") -client = DiscoveryClient(credential=DefaultAzureCredential(), subscription_id=sub_id) +client = 
DiscoveryMgmtClient(credential=DefaultAzureCredential(), subscription_id=sub_id) ``` ## Examples diff --git a/sdk/discovery/azure-mgmt-discovery/_metadata.json b/sdk/discovery/azure-mgmt-discovery/_metadata.json index 9fa699c9eb60..d62706705837 100644 --- a/sdk/discovery/azure-mgmt-discovery/_metadata.json +++ b/sdk/discovery/azure-mgmt-discovery/_metadata.json @@ -3,7 +3,7 @@ "apiVersions": { "Microsoft.Discovery": "2026-02-01-preview" }, - "commit": "74cc90c49189a079b3cc93fde9c9ad76742f0184", + "commit": "402bf21e472d87c1b3abcd0b2675a8b6c8a42700", "repository_url": "https://github.com/Azure/azure-rest-api-specs", "typespec_src": "specification/discovery/Discovery.Management", "emitterVersion": "0.60.2" diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/__init__.py index ec6aee3555b7..75092cc57e16 100644 --- a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/__init__.py +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/__init__.py @@ -12,7 +12,7 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._client import DiscoveryClient # type: ignore +from ._client import DiscoveryMgmtClient # type: ignore from ._version import VERSION __version__ = VERSION @@ -25,7 +25,7 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ - "DiscoveryClient", + "DiscoveryMgmtClient", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_client.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_client.py index 8877a29ffded..c2cb92254322 100644 --- a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_client.py +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_client.py @@ -17,7 +17,7 @@ from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy from azure.mgmt.core.tools import get_arm_endpoints 
-from ._configuration import DiscoveryClientConfiguration +from ._configuration import DiscoveryMgmtClientConfiguration from ._utils.serialization import Deserializer, Serializer from .operations import ( BookshelfPrivateEndpointConnectionsOperations, @@ -41,7 +41,7 @@ from azure.core.credentials import TokenCredential -class DiscoveryClient: # pylint: disable=too-many-instance-attributes +class DiscoveryMgmtClient: # pylint: disable=too-many-instance-attributes """Microsoft.Discovery Resource Provider management API. :ivar operations: Operations operations @@ -110,7 +110,7 @@ def __init__( if not base_url: base_url = _endpoints["resource_manager"] credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) - self._config = DiscoveryClientConfiguration( + self._config = DiscoveryMgmtClientConfiguration( credential=credential, subscription_id=subscription_id, base_url=cast(str, base_url), diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_configuration.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_configuration.py index a3e76738a1ed..26f0ad9e8c9d 100644 --- a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_configuration.py +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/_configuration.py @@ -18,8 +18,8 @@ from azure.core.credentials import TokenCredential -class DiscoveryClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for DiscoveryClient. +class DiscoveryMgmtClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for DiscoveryMgmtClient. Note that all parameters used to create this instance are saved as instance attributes. 
diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py index ec0f970e7aa8..d47f0ad2b019 100644 --- a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/__init__.py @@ -12,7 +12,7 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._client import DiscoveryClient # type: ignore +from ._client import DiscoveryMgmtClient # type: ignore try: from ._patch import __all__ as _patch_all @@ -22,7 +22,7 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ - "DiscoveryClient", + "DiscoveryMgmtClient", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py index 754cf3700ee5..535bea73b97a 100644 --- a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_client.py @@ -18,7 +18,7 @@ from azure.mgmt.core.tools import get_arm_endpoints from .._utils.serialization import Deserializer, Serializer -from ._configuration import DiscoveryClientConfiguration +from ._configuration import DiscoveryMgmtClientConfiguration from .operations import ( BookshelfPrivateEndpointConnectionsOperations, BookshelfPrivateLinkResourcesOperations, @@ -41,7 +41,7 @@ from azure.core.credentials_async import AsyncTokenCredential -class DiscoveryClient: # pylint: disable=too-many-instance-attributes +class DiscoveryMgmtClient: # pylint: disable=too-many-instance-attributes """Microsoft.Discovery Resource Provider management API. 
:ivar operations: Operations operations @@ -111,7 +111,7 @@ def __init__( if not base_url: base_url = _endpoints["resource_manager"] credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) - self._config = DiscoveryClientConfiguration( + self._config = DiscoveryMgmtClientConfiguration( credential=credential, subscription_id=subscription_id, base_url=cast(str, base_url), diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py index 7a23d841e7f2..174be8f96273 100644 --- a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/_configuration.py @@ -18,8 +18,8 @@ from azure.core.credentials_async import AsyncTokenCredential -class DiscoveryClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for DiscoveryClient. +class DiscoveryMgmtClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for DiscoveryMgmtClient. Note that all parameters used to create this instance are saved as instance attributes. 
diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py index d679affd9ddb..febeb13e63c7 100644 --- a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/aio/operations/_operations.py @@ -101,7 +101,7 @@ build_workspaces_list_by_subscription_request, build_workspaces_update_request, ) -from .._configuration import DiscoveryClientConfiguration +from .._configuration import DiscoveryMgmtClientConfiguration T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] @@ -115,14 +115,14 @@ class Operations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`operations` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -222,14 +222,14 @@ class BookshelvesOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`bookshelves` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -1082,14 +1082,14 @@ class BookshelfPrivateEndpointConnectionsOperations: # pylint: disable=name-too **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`bookshelf_private_endpoint_connections` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -1662,14 +1662,14 @@ class BookshelfPrivateLinkResourcesOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`bookshelf_private_link_resources` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -1857,14 +1857,14 @@ class ToolsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`tools` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -2699,14 +2699,14 @@ class ProjectsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`projects` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -3515,14 +3515,14 @@ class WorkspacesOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`workspaces` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -4375,14 +4375,14 @@ class WorkspacePrivateEndpointConnectionsOperations: # pylint: disable=name-too **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`workspace_private_endpoint_connections` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -4955,14 +4955,14 @@ class ChatModelDeploymentsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`chat_model_deployments` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -5772,14 +5772,14 @@ class WorkspacePrivateLinkResourcesOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`workspace_private_link_resources` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -5967,14 +5967,14 @@ class NodePoolsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`node_pools` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -6783,14 +6783,14 @@ class SupercomputersOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`supercomputers` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -7647,14 +7647,14 @@ class StorageAssetsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`storage_assets` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -8463,14 +8463,14 @@ class StorageContainersOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.aio.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.aio.DiscoveryMgmtClient`'s :attr:`storage_containers` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") diff --git a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_operations.py b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_operations.py index 1de2eb11a27d..ca47c6cd7b3d 100644 --- a/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/azure/mgmt/discovery/operations/_operations.py @@ -33,7 +33,7 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._configuration import DiscoveryClientConfiguration +from .._configuration import DiscoveryMgmtClientConfiguration from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from .._utils.serialization import Deserializer, Serializer @@ -1846,14 +1846,14 @@ class Operations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`operations` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -1953,14 +1953,14 @@ class BookshelvesOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`bookshelves` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -2811,14 +2811,14 @@ class BookshelfPrivateEndpointConnectionsOperations: # pylint: disable=name-too **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`bookshelf_private_endpoint_connections` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -3391,14 +3391,14 @@ class BookshelfPrivateLinkResourcesOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`bookshelf_private_link_resources` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -3585,14 +3585,14 @@ class ToolsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`tools` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -4419,14 +4419,14 @@ class ProjectsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`projects` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -5233,14 +5233,14 @@ class WorkspacesOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`workspaces` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -6091,14 +6091,14 @@ class WorkspacePrivateEndpointConnectionsOperations: # pylint: disable=name-too **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`workspace_private_endpoint_connections` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -6671,14 +6671,14 @@ class ChatModelDeploymentsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`chat_model_deployments` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -7487,14 +7487,14 @@ class WorkspacePrivateLinkResourcesOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`workspace_private_link_resources` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -7681,14 +7681,14 @@ class NodePoolsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`node_pools` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -8497,14 +8497,14 @@ class SupercomputersOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`supercomputers` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -9355,14 +9355,14 @@ class StorageAssetsOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`storage_assets` attribute. 
""" def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @@ -10171,14 +10171,14 @@ class StorageContainersOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.mgmt.discovery.DiscoveryClient`'s + :class:`~azure.mgmt.discovery.DiscoveryMgmtClient`'s :attr:`storage_containers` attribute. """ def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: DiscoveryClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._config: DiscoveryMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py index e2f9e9ac94a1..b1a9a95fd726 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_create_or_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py index c9a719dc6d4c..bf1ca146fb27 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py index d224abee6d1f..38ef3adf1175 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery 
import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py index 9b462251578b..67eff6f5a4e1 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_endpoint_connections_list_by_bookshelf_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py index 0357315dab09..a3ec3fd8f140 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( 
credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py index 1c85ad91e02f..0087d4a835e9 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelf_private_link_resources_list_by_bookshelf_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py index 920d43f6b064..f1c14bda49cf 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_create_or_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py index f87c11d33dc6..b8074abb2c16 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py index 70ef4e234ec1..993bc6900627 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py index 149f8eae5cae..e60dc746e7a0 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_resource_group_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import 
DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py index 13842b343a4f..5a4512bc50fb 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_list_by_subscription_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py index dc4dbe1d1600..fbbcb56f737c 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/bookshelves_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git 
a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py index 33e660ba1b87..abb5d6ac4ffb 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_create_or_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py index abaed9cae0f6..cb9d3096e471 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py index 1a8de95ba342..6c7d81fc9de5 100644 --- 
a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py index 61061a168fe7..f8ccc2c039a0 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_list_by_workspace_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py index b20d727b632d..d8220d7d1385 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/chat_model_deployments_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from 
azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py index 77e678efa54d..57e5417847ff 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_create_or_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py index 071e922722b4..18ff0c542244 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_get_maximum_set_gen.py 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_get_maximum_set_gen.py index 5c92f9be70c8..cc4eea559b84 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_list_by_supercomputer_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_list_by_supercomputer_maximum_set_gen.py index 83673f49fe40..d0c7b340187c 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_list_by_supercomputer_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_list_by_supercomputer_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_update_maximum_set_gen.py index 2176048bf882..a809a66cf67a 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/node_pools_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import 
DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_maximum_set_gen.py index 73094996d5e9..5cac15277941 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_minimum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_minimum_set_gen.py index b39b5c36d6cc..e01c97ebd310 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_minimum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/operations_list_minimum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_create_or_update_maximum_set_gen.py 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_create_or_update_maximum_set_gen.py index c076216732c8..b63293a71157 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_create_or_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_delete_maximum_set_gen.py index b2c5d06cd450..17d1115e8dcd 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_get_maximum_set_gen.py index c3179d81d311..4fb437af2aa9 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient 
+from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_list_by_workspace_maximum_set_gen.py index 7af1aba95e82..f04684406333 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_list_by_workspace_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_list_by_workspace_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_update_maximum_set_gen.py index 9bf5bbb8a372..fe6b13aa1e12 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/projects_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_create_or_update_maximum_set_gen.py 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_create_or_update_maximum_set_gen.py index ffa9f94615f3..3578a49e801a 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_create_or_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_delete_maximum_set_gen.py index 4b6a8da8cfed..fc5746458aa0 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_get_maximum_set_gen.py index 2369e4aaad2c..87ad0438dda2 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import 
DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_list_by_storage_container_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_list_by_storage_container_maximum_set_gen.py index 35c8b92a8742..7fe7724aff96 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_list_by_storage_container_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_list_by_storage_container_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_update_maximum_set_gen.py index 0f845a2880ac..5dabb58daa9a 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_assets_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git 
a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py index 72e2f118f656..5fc99f68f909 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_create_or_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py index 8c01e23707a7..d4c5a1a443e5 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py index 36a70da711b4..83ae374821d9 100644 --- 
a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py index 18c3830b3c46..f41f373dbad8 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_resource_group_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py index adcd95999d69..65a16969c8f6 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_list_by_subscription_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import 
DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py index 701ce91f8af7..1ab604fd2647 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/storage_containers_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py index 6dfc27a288aa..c8c562ce8946 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_create_or_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git 
a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py index 406a01e13889..81dcaba509e9 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py index 0342e2b93d69..09da375a6906 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py index c30b2f770dd1..f894d0623aab 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_resource_group_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py index 3887643ce905..358201caef65 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_list_by_subscription_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py index cff377e47d05..3c82b1402f8a 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/supercomputers_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = 
DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py index 24c122430d83..8c9061c1a398 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_create_or_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py index 31c411792c4d..e38da9b4746d 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py index 3fd5e3176f41..a6886303264b 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py index 90da28f92a56..93839c36735f 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_resource_group_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py index a428d058d2e5..517f435b70ba 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_list_by_subscription_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = 
DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py index 0bdd0a8002b9..49117ed70a14 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/tools_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py index 92fd9fc0afcd..8acb692f15d8 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_create_or_update_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py index 49c576e042da..5b8e2b7098e3 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py index eecd6bb9f1ac..33184a17ef9b 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py index 4078f65004ba..41412607fb1f 100644 --- 
a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_endpoint_connections_list_by_workspace_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py index 3e318f0033ec..6f8999ad1f72 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py index 798ab14432f5..f95206178269 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspace_private_link_resources_list_by_workspace_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py index 04c6770373e3..53403dda4f7a 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_create_or_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py index 987e9602cafb..c4e8bc28ce22 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_delete_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = 
DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py index 8468a78b7fab..e8d134f88fc9 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_get_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py index 3a5c9863a67b..d9ffa31bc738 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_resource_group_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py index 3d4beb74251a..2139ca564d99 100644 --- 
a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_list_by_subscription_maximum_set_gen.py @@ -8,7 +8,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -25,7 +25,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py index 6dfc35655023..cc7a66edafd7 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_samples/workspaces_update_maximum_set_gen.py @@ -9,7 +9,7 @@ from azure.identity import DefaultAzureCredential -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient """ # PREREQUISITES @@ -26,7 +26,7 @@ def main(): - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="SUBSCRIPTION_ID", ) diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py index 7ba6ee889813..94f7843fe01d 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/conftest.py @@ -21,14 +21,14 @@ # For security, please avoid record sensitive identity information in recordings @pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): - discovery_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") - discovery_tenant_id = 
os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") - discovery_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") - discovery_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") - add_general_regex_sanitizer(regex=discovery_subscription_id, value="00000000-0000-0000-0000-000000000000") - add_general_regex_sanitizer(regex=discovery_tenant_id, value="00000000-0000-0000-0000-000000000000") - add_general_regex_sanitizer(regex=discovery_client_id, value="00000000-0000-0000-0000-000000000000") - add_general_regex_sanitizer(regex=discovery_client_secret, value="00000000-0000-0000-0000-000000000000") + discoverymgmt_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + discoverymgmt_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + discoverymgmt_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + discoverymgmt_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=discoverymgmt_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=discoverymgmt_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=discoverymgmt_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=discoverymgmt_client_secret, value="00000000-0000-0000-0000-000000000000") add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") add_header_regex_sanitizer(key="Cookie", value="cookie;") diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations.py similarity index 94% rename from 
sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations.py index 740087be3f1b..cbbe2fbe4f14 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryBookshelfPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtBookshelfPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations_async.py similarity index 94% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations_async.py rename to 
sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations_async.py index 3354abbfdb64..66afc8eee4f9 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_endpoint_connections_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_endpoint_connections_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryBookshelfPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtBookshelfPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations.py similarity index 88% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations.py index 
f4466a757f47..0d765dfbe829 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryBookshelfPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtBookshelfPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations_async.py similarity index 87% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations_async.py index 95007b53571b..5d40f6fa1cf6 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelf_private_link_resources_operations_async.py +++ 
b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelf_private_link_resources_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryBookshelfPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtBookshelfPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations.py similarity index 97% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations.py index 45a22b2f09d1..b05a5853b1eb 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryBookshelvesOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtBookshelvesOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations_async.py similarity index 97% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations_async.py index 17e9e09b7c43..a8c04de65510 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_bookshelves_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_bookshelves_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryBookshelvesOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtBookshelvesOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations.py similarity index 95% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations.py index 7f3332215ea0..3bd49073761c 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryChatModelDeploymentsOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtChatModelDeploymentsOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations_async.py similarity index 95% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations_async.py index 8d27d002cc8f..8d11142138ff 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_chat_model_deployments_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_chat_model_deployments_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryChatModelDeploymentsOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtChatModelDeploymentsOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations.py similarity index 96% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations.py index 0f68af30f8c9..b417724dbac5 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryNodePoolsOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtNodePoolsOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations_async.py similarity index 96% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations_async.py index f7b6f5384a68..d641d1fa68d9 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_node_pools_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_node_pools_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryNodePoolsOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtNodePoolsOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations.py similarity index 85% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations.py index 2190847347aa..1123e4821f63 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations_async.py similarity index 84% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations_async.py index 4d76e32a16dc..54ee10145b6b 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations.py similarity index 96% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations.py index 5571d5c4f8c5..db6c8817c96e 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryProjectsOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtProjectsOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations_async.py similarity index 95% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations_async.py index 43b0c90c0639..64f11baba1d5 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_projects_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_projects_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryProjectsOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtProjectsOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations.py similarity index 95% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations.py index ff7993dc6c6a..e047031e9f08 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryStorageAssetsOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtStorageAssetsOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations_async.py similarity index 95% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations_async.py index d7ba3657d591..382cdd9eae09 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_assets_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_assets_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryStorageAssetsOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtStorageAssetsOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations.py similarity index 95% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations.py index 2e46246149a9..0428af77d1a1 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryStorageContainersOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtStorageContainersOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations_async.py similarity index 95% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations_async.py index d35efe6ad6c5..a5e8e22dcf33 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_storage_containers_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_storage_containers_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryStorageContainersOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtStorageContainersOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations.py similarity index 96% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations.py index 4032de062875..cc16b5e02832 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoverySupercomputersOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtSupercomputersOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations_async.py similarity index 96% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations_async.py index d3bacbfcf71a..5a918c3a4769 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_supercomputers_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_supercomputers_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoverySupercomputersOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtSupercomputersOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations.py similarity index 96% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations.py index 7e4ff1279741..9ffadde34092 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryToolsOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtToolsOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations_async.py similarity index 96% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations_async.py index 2617d3497b56..d07f79805da9 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_tools_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_tools_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryToolsOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtToolsOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations.py similarity index 94% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations.py index 160ed09ba273..1c14d49687f2 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryWorkspacePrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtWorkspacePrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations_async.py similarity index 94% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations_async.py index 2402290128ff..ca43727b70aa 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_endpoint_connections_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_endpoint_connections_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryWorkspacePrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtWorkspacePrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations.py similarity index 88% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations.py index 672f2facc64c..96e9b4b35cd6 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryWorkspacePrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtWorkspacePrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations_async.py similarity index 87% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations_async.py index 2275cb83c63c..4bc8d39d14f1 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspace_private_link_resources_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspace_private_link_resources_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryWorkspacePrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtWorkspacePrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations.py similarity index 97% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations.py index bccd1dfdde01..0c6ef85250d0 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy @@ -14,9 +14,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryWorkspacesOperations(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtWorkspacesOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient) + self.client = self.create_mgmt_client(DiscoveryMgmtClient) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations_async.py b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations_async.py similarity index 97% rename from sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations_async.py rename to sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations_async.py index 594aa5967503..dbe007b808d0 100644 --- a/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_workspaces_operations_async.py +++ b/sdk/discovery/azure-mgmt-discovery/generated_tests/test_discovery_mgmt_workspaces_operations_async.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import pytest -from azure.mgmt.discovery.aio import DiscoveryClient +from azure.mgmt.discovery.aio import DiscoveryMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer from devtools_testutils.aio import recorded_by_proxy_async @@ -15,9 +15,9 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") -class TestDiscoveryWorkspacesOperationsAsync(AzureMgmtRecordedTestCase): +class TestDiscoveryMgmtWorkspacesOperationsAsync(AzureMgmtRecordedTestCase): def setup_method(self, method): - self.client = self.create_mgmt_client(DiscoveryClient, is_async=True) + self.client = self.create_mgmt_client(DiscoveryMgmtClient, is_async=True) @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py b/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py index 6456b9bac60a..947f504c4748 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py @@ -28,6 +28,7 @@ def test_list_bookshelves_by_resource_group(self): """Test listing bookshelves in a resource group.""" bookshelves = list(self.client.bookshelves.list_by_resource_group(self.resource_group)) assert isinstance(bookshelves, list) + @recorded_by_proxy def test_get_bookshelf(self): """Test getting a specific bookshelf by name.""" @@ -35,6 +36,7 @@ def test_get_bookshelf(self): assert bookshelf is not None assert hasattr(bookshelf, "name") assert hasattr(bookshelf, "location") + @recorded_by_proxy def test_create_bookshelf(self): """Test creating a bookshelf.""" @@ -46,6 +48,7 @@ def test_create_bookshelf(self): ) bookshelf = operation.result() assert bookshelf is not None + @recorded_by_proxy def test_update_bookshelf(self): """Test updating a bookshelf.""" @@ -59,6 +62,7 @@ def 
test_update_bookshelf(self): ) updated_bookshelf = operation.result() assert updated_bookshelf is not None + @recorded_by_proxy def test_delete_bookshelf(self): """Test deleting a bookshelf.""" diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py b/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py index 4a7aac393d7b..2399d037bd68 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py @@ -29,22 +29,20 @@ def test_list_chat_model_deployments_by_workspace(self): self.client.chat_model_deployments.list_by_workspace(self.resource_group, self.workspace_name) ) assert isinstance(deployments, list) + @recorded_by_proxy def test_get_chat_model_deployment(self): """Test getting a specific chat model deployment by name.""" - deployment = self.client.chat_model_deployments.get(self.resource_group, self.workspace_name, "test-deploy-chatmodel01") + deployment = self.client.chat_model_deployments.get( + self.resource_group, self.workspace_name, "test-deploy-chatmodel01" + ) assert deployment is not None assert hasattr(deployment, "name") + @recorded_by_proxy def test_create_chat_model_deployment(self): """Test creating a chat model deployment.""" - deployment_data = { - "location": "uksouth", - "properties": { - "modelFormat": "OpenAI", - "modelName": "gpt-4o" - } - } + deployment_data = {"location": "uksouth", "properties": {"modelFormat": "OpenAI", "modelName": "gpt-4o"}} operation = self.client.chat_model_deployments.begin_create_or_update( resource_group_name=self.resource_group, workspace_name=self.workspace_name, @@ -53,6 +51,7 @@ def test_create_chat_model_deployment(self): ) deployment = operation.result() assert deployment is not None + @recorded_by_proxy def test_delete_chat_model_deployment(self): """Test deleting a chat model deployment.""" diff --git 
a/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py b/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py index 0aa7a40dfc08..f2e766a6ac29 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -26,6 +27,7 @@ def test_list_node_pools_by_supercomputer(self): """Test listing node pools in a supercomputer.""" node_pools = list(self.client.node_pools.list_by_supercomputer("rp114-rg", NODE_POOL_SUPERCOMPUTER_NAME)) assert isinstance(node_pools, list) + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_get_node_pool(self): @@ -34,6 +36,7 @@ def test_get_node_pool(self): node_pool = self.client.node_pools.get(self.resource_group, supercomputer_name, "test-nodepool") assert node_pool is not None assert hasattr(node_pool, "name") + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_create_node_pool(self): @@ -46,8 +49,8 @@ def test_create_node_pool(self): "vmSize": "Standard_D4s_v6", "maxNodeCount": 3, "minNodeCount": 1, - "scaleSetPriority": "Regular" - } + "scaleSetPriority": "Regular", + }, } operation = self.client.node_pools.begin_create_or_update( resource_group_name="olawal", @@ -57,6 +60,7 @@ def test_create_node_pool(self): ) node_pool = operation.result() assert node_pool is not None + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_update_node_pool(self): @@ -73,6 +77,7 @@ def test_update_node_pool(self): ) updated_node_pool = operation.result() assert updated_node_pool is not None + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_delete_node_pool(self): diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py b/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py 
index 01ceead5a2c2..fc677fd6bc18 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py @@ -36,6 +36,7 @@ def test_list_workspace_private_endpoint_connections(self): ) ) assert isinstance(connections, list) + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_get_workspace_private_endpoint_connection(self): @@ -46,18 +47,13 @@ def test_get_workspace_private_endpoint_connection(self): ) assert connection is not None assert hasattr(connection, "name") + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_create_workspace_private_endpoint_connection(self): """Test creating a workspace private endpoint connection.""" connection_name = "test-pe-connection" - connection_data = { - "properties": { - "privateLinkServiceConnectionState": { - "status": "Approved" - } - } - } + connection_data = {"properties": {"privateLinkServiceConnectionState": {"status": "Approved"}}} operation = self.client.workspace_private_endpoint_connections.begin_create_or_update( resource_group_name=self.resource_group, workspace_name=self.workspace_name, @@ -66,6 +62,7 @@ def test_create_workspace_private_endpoint_connection(self): ) connection = operation.result() assert connection is not None + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_delete_workspace_private_endpoint_connection(self): @@ -87,6 +84,7 @@ def test_list_workspace_private_link_resources(self): self.client.workspace_private_link_resources.list_by_workspace(self.resource_group, self.workspace_name) ) assert isinstance(link_resources, list) + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_get_workspace_private_link_resource(self): @@ -108,6 +106,7 @@ def test_list_bookshelf_private_endpoint_connections(self): ) ) assert isinstance(connections, list) + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def 
test_get_bookshelf_private_endpoint_connection(self): @@ -118,18 +117,13 @@ def test_get_bookshelf_private_endpoint_connection(self): ) assert connection is not None assert hasattr(connection, "name") + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_create_bookshelf_private_endpoint_connection(self): """Test creating a bookshelf private endpoint connection.""" connection_name = "test-pe-connection" - connection_data = { - "properties": { - "privateLinkServiceConnectionState": { - "status": "Approved" - } - } - } + connection_data = {"properties": {"privateLinkServiceConnectionState": {"status": "Approved"}}} operation = self.client.bookshelf_private_endpoint_connections.begin_create_or_update( resource_group_name=self.resource_group, bookshelf_name=self.bookshelf_name, @@ -138,6 +132,7 @@ def test_create_bookshelf_private_endpoint_connection(self): ) connection = operation.result() assert connection is not None + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_delete_bookshelf_private_endpoint_connection(self): @@ -159,6 +154,7 @@ def test_list_bookshelf_private_link_resources(self): self.client.bookshelf_private_link_resources.list_by_bookshelf(self.resource_group, self.bookshelf_name) ) assert isinstance(link_resources, list) + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_get_bookshelf_private_link_resource(self): diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py b/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py index 2aa018be84bc..2553dae7bfd4 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py @@ -28,6 +28,7 @@ def test_list_projects_by_workspace(self): """Test listing projects in a workspace.""" projects = list(self.client.projects.list_by_workspace("newapiversiontest", self.workspace_name)) assert isinstance(projects, list) + @pytest.mark.skip(reason="no recording") 
@recorded_by_proxy def test_get_project(self): @@ -37,6 +38,7 @@ def test_get_project(self): assert project is not None assert hasattr(project, "name") assert hasattr(project, "location") + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_create_project(self): @@ -51,6 +53,7 @@ def test_create_project(self): ) project = operation.result() assert project is not None + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_update_project(self): @@ -66,6 +69,7 @@ def test_update_project(self): ) updated_project = operation.result() assert updated_project is not None + @pytest.mark.skip(reason="no recording") @recorded_by_proxy def test_delete_project(self): diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py index 5dfe21151402..0684419bf58a 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py @@ -25,8 +25,11 @@ def setup_method(self, method): @recorded_by_proxy def test_list_storage_assets_by_storage_container(self): """Test listing storage assets in a storage container.""" - assets = list(self.client.storage_assets.list_by_storage_container(self.resource_group, STORAGE_ASSET_CONTAINER_NAME)) + assets = list( + self.client.storage_assets.list_by_storage_container(self.resource_group, STORAGE_ASSET_CONTAINER_NAME) + ) assert isinstance(assets, list) + @recorded_by_proxy def test_get_storage_asset(self): """Test getting a specific storage asset by name.""" @@ -34,16 +37,14 @@ def test_get_storage_asset(self): asset = self.client.storage_assets.get(self.resource_group, storage_container_name, "test-sa-482ad005") assert asset is not None assert hasattr(asset, "name") + @recorded_by_proxy def test_create_storage_asset(self): """Test creating a storage asset.""" storage_container_name = "test-sc-8bef0d1a" asset_data = { "location": "uksouth", - "properties": 
{ - "description": "Test storage asset for SDK validation", - "path": "data/test-assets" - } + "properties": {"description": "Test storage asset for SDK validation", "path": "data/test-assets"}, } operation = self.client.storage_assets.begin_create_or_update( resource_group_name="olawal", @@ -53,6 +54,7 @@ def test_create_storage_asset(self): ) asset = operation.result() assert asset is not None + @recorded_by_proxy def test_update_storage_asset(self): """Test updating a storage asset.""" @@ -67,6 +69,7 @@ def test_update_storage_asset(self): ) updated_asset = operation.result() assert updated_asset is not None + @recorded_by_proxy def test_delete_storage_asset(self): """Test deleting a storage asset.""" diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py index b39681863d12..dd4d89bc9749 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@@ -31,12 +32,14 @@ def test_list_storage_containers_by_subscription(self): """Test listing storage containers in the subscription.""" containers = list(self.client.storage_containers.list_by_subscription()) assert isinstance(containers, list) + @recorded_by_proxy def test_get_storage_container(self): """Test getting a specific storage container by name.""" container = self.client.storage_containers.get(self.resource_group, "test-sc-8bef0d1a") assert container is not None assert hasattr(container, "name") + @recorded_by_proxy def test_create_storage_container(self): """Test creating a storage container.""" @@ -45,9 +48,9 @@ def test_create_storage_container(self): "properties": { "storageStore": { "kind": "AzureStorageBlob", - "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr" + "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", } - } + }, } operation = self.client.storage_containers.begin_create_or_update( resource_group_name="olawal", @@ -56,6 +59,7 @@ def test_create_storage_container(self): ) container = operation.result() assert container is not None + @recorded_by_proxy def test_update_storage_container(self): """Test updating a storage container.""" @@ -69,6 +73,7 @@ def test_update_storage_container(self): ) updated_container = operation.result() assert updated_container is not None + @recorded_by_proxy def test_delete_storage_container(self): """Test deleting a storage container.""" diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py b/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py index 42d6e23d1aa9..01f362897416 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py @@ -1,3 +1,4 @@ +# pylint: 
disable=line-too-long,useless-suppression # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. @@ -31,6 +32,7 @@ def test_list_supercomputers_by_subscription(self): """Test listing supercomputers in the subscription.""" supercomputers = list(self.client.supercomputers.list_by_subscription()) assert isinstance(supercomputers, list) + @recorded_by_proxy def test_get_supercomputer(self): """Test getting a specific supercomputer by name.""" @@ -38,6 +40,7 @@ def test_get_supercomputer(self): assert supercomputer is not None assert hasattr(supercomputer, "name") assert hasattr(supercomputer, "location") + @recorded_by_proxy def test_create_supercomputer(self): """Test creating a supercomputer.""" @@ -49,9 +52,9 @@ def test_create_supercomputer(self): "identities": { "clusterIdentity": {"id": mi_id}, "kubeletIdentity": {"id": mi_id}, - "workloadIdentities": {mi_id: {}} - } - } + "workloadIdentities": {mi_id: {}}, + }, + }, } operation = self.client.supercomputers.begin_create_or_update( resource_group_name="olawal", @@ -60,6 +63,7 @@ def test_create_supercomputer(self): ) supercomputer = operation.result() assert supercomputer is not None + @pytest.mark.skip(reason="server returns 400 on supercomputer PATCH - service-side bug") @recorded_by_proxy def test_update_supercomputer(self): @@ -74,6 +78,7 @@ def test_update_supercomputer(self): ) updated_supercomputer = operation.result() assert updated_supercomputer is not None + @recorded_by_proxy def test_delete_supercomputer(self): """Test deleting a supercomputer.""" diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py b/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py index 50f6e9de3f8c..6e3feb23cce5 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py @@ -42,6 +42,7 @@ def test_get_tool(self): # Don't assert on name since it may be sanitized in playback assert 
hasattr(tool, "name") assert hasattr(tool, "location") + @recorded_by_proxy def test_create_tool(self): """Test creating a tool.""" @@ -59,26 +60,14 @@ def test_create_tool(self): { "name": "worker", "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, + "image": {"acr": "demodiscoveryacr.azurecr.io/molpredictor:latest"}, "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, + "min_resources": {"cpu": "1", "ram": "1Gi", "storage": "32", "gpu": "0"}, + "max_resources": {"cpu": "2", "ram": "1Gi", "storage": "64", "gpu": "0"}, "recommended_sku": ["Standard_D4s_v6"], "pool_type": "static", - "pool_size": 1 - } + "pool_size": 1, + }, } ], "actions": [ @@ -90,17 +79,17 @@ def test_create_tool(self): "properties": { "action": { "type": "string", - "description": "The property to predict. Must be one of [log_p, boiling_point, solubility, density, critical_point]" + "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]", } }, - "required": ["action"] + "required": ["action"], }, "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" + "infra_node": "worker", } - ] - } - } + ], + }, + }, } operation = self.client.tools.begin_create_or_update( resource_group_name="olawal", @@ -109,6 +98,7 @@ def test_create_tool(self): ) tool = operation.result() assert tool is not None + @recorded_by_proxy def test_update_tool(self): """Test updating a tool.""" @@ -122,6 +112,7 @@ def test_update_tool(self): ) updated_tool = operation.result() assert updated_tool is not None + @recorded_by_proxy def test_delete_tool(self): """Test deleting a tool.""" diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py b/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py index 6f8a6e30c3f6..aca6287cda32 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py @@ -45,6 +45,7 @@ def test_get_workspace(self): # Don't assert on name since it may be sanitized in playback assert hasattr(workspace, "name") assert hasattr(workspace, "location") + @recorded_by_proxy def test_create_workspace(self): """Test creating a workspace.""" @@ -67,7 +68,7 @@ def test_create_workspace(self): }, "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", "publicNetworkAccess": "Disabled", - } + }, } operation = self.client.workspaces.begin_create_or_update( resource_group_name="olawal", @@ -76,6 +77,7 @@ def test_create_workspace(self): ) workspace = operation.result() assert workspace is not None + @recorded_by_proxy def test_update_workspace(self): """Test updating a workspace by changing the key vault key version.""" @@ -95,6 +97,7 @@ def test_update_workspace(self): ) updated_workspace = operation.result() assert 
updated_workspace is not None + @recorded_by_proxy def test_delete_workspace(self): """Test deleting a workspace.""" diff --git a/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml b/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml index 7a15b77623fb..dfe03767464b 100644 --- a/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml +++ b/sdk/discovery/azure-mgmt-discovery/tsp-location.yaml @@ -1,5 +1,5 @@ directory: specification/discovery/Discovery.Management -commit: 74cc90c49189a079b3cc93fde9c9ad76742f0184 +commit: 402bf21e472d87c1b3abcd0b2675a8b6c8a42700 repo: Azure/azure-rest-api-specs additionalDirectories: - specification/discovery/Discovery.Management.Shared From 9b4304f31b3f94460fe03c566698c5dc3f2b85a7 Mon Sep 17 00:00:00 2001 From: ChenxiJiang333 Date: Mon, 9 Mar 2026 14:58:58 +0800 Subject: [PATCH 7/9] update --- .../azure-mgmt-discovery/tests/test_bookshelves.py | 4 ++-- .../tests/test_chat_model_deployments.py | 4 ++-- .../azure-mgmt-discovery/tests/test_node_pools.py | 4 ++-- .../azure-mgmt-discovery/tests/test_operations.py | 4 ++-- .../azure-mgmt-discovery/tests/test_private_endpoints.py | 4 ++-- sdk/discovery/azure-mgmt-discovery/tests/test_projects.py | 4 ++-- .../azure-mgmt-discovery/tests/test_storage_assets.py | 4 ++-- .../azure-mgmt-discovery/tests/test_storage_containers.py | 4 ++-- .../azure-mgmt-discovery/tests/test_supercomputers.py | 4 ++-- sdk/discovery/azure-mgmt-discovery/tests/test_tools.py | 4 ++-- .../azure-mgmt-discovery/tests/test_unit_client.py | 8 ++++---- .../azure-mgmt-discovery/tests/test_workspaces.py | 4 ++-- 12 files changed, 26 insertions(+), 26 deletions(-) diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py b/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py index 947f504c4748..b9c39c855581 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_bookshelves.py @@ -4,7 +4,7 @@ # ------------------------------------ 
"""Tests for Bookshelves operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase, AZURE_RESOURCE_GROUP @@ -14,7 +14,7 @@ class TestBookshelves(DiscoveryMgmtTestCase): """Tests for Bookshelves operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = AZURE_RESOURCE_GROUP @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py b/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py index 2399d037bd68..0d726ed40d65 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_chat_model_deployments.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. 
# ------------------------------------ """Tests for ChatModelDeployments operations.""" -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -18,7 +18,7 @@ class TestChatModelDeployments(DiscoveryMgmtTestCase): """Tests for ChatModelDeployments operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = WORKSPACE_RESOURCE_GROUP self.workspace_name = WORKSPACE_NAME diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py b/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py index f2e766a6ac29..afa6ba92a1ee 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_node_pools.py @@ -5,7 +5,7 @@ # ------------------------------------ """Tests for NodePools operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -19,7 +19,7 @@ class TestNodePools(DiscoveryMgmtTestCase): """Tests for NodePools operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = NODE_POOL_RESOURCE_GROUP @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py b/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py index 512e0fd48d1a..4f03ed81416e 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_operations.py @@ -4,7 +4,7 @@ # Licensed under the MIT License. 
# ------------------------------------ """Tests for Operations operations.""" -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -14,7 +14,7 @@ class TestOperations(DiscoveryMgmtTestCase): """Tests for Operations operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) @recorded_by_proxy def test_list_operations(self): diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py b/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py index fc677fd6bc18..04c3c579c390 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_private_endpoints.py @@ -4,7 +4,7 @@ # ------------------------------------ """Tests for Private Endpoint related operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -20,7 +20,7 @@ class TestPrivateEndpoints(DiscoveryMgmtTestCase): """Tests for Private Endpoint related operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = WORKSPACE_RESOURCE_GROUP self.workspace_name = WORKSPACE_NAME self.bookshelf_name = BOOKSHELF_NAME diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py b/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py index 2553dae7bfd4..c1d04e720eda 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_projects.py @@ -4,7 +4,7 @@ # ------------------------------------ 
"""Tests for Projects operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -19,7 +19,7 @@ class TestProjects(DiscoveryMgmtTestCase): """Tests for Projects operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = WORKSPACE_RESOURCE_GROUP self.workspace_name = WORKSPACE_NAME diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py index 0684419bf58a..ec7978c255d3 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_assets.py @@ -5,7 +5,7 @@ # ------------------------------------ """Tests for Storage Assets operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -19,7 +19,7 @@ class TestStorageAssets(DiscoveryMgmtTestCase): """Tests for Storage Assets operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = STORAGE_ASSET_RESOURCE_GROUP @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py index dd4d89bc9749..ed1db8dc3b55 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_storage_containers.py @@ -5,7 +5,7 @@ # ------------------------------------ """Tests for StorageContainers 
operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -18,7 +18,7 @@ class TestStorageContainers(DiscoveryMgmtTestCase): """Tests for StorageContainers operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = STORAGE_CONTAINER_RESOURCE_GROUP @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py b/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py index 01f362897416..94e5cedd641b 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_supercomputers.py @@ -5,7 +5,7 @@ # ------------------------------------ """Tests for Supercomputers operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -18,7 +18,7 @@ class TestSupercomputers(DiscoveryMgmtTestCase): """Tests for Supercomputers operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = SUPERCOMPUTER_RESOURCE_GROUP @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py b/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py index 6e3feb23cce5..6952e6d458a0 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_tools.py @@ -5,7 +5,7 @@ # ------------------------------------ """Tests for Tools operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from 
azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase, AZURE_RESOURCE_GROUP @@ -19,7 +19,7 @@ class TestTools(DiscoveryMgmtTestCase): """Tests for Tools operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = AZURE_RESOURCE_GROUP @recorded_by_proxy diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_unit_client.py b/sdk/discovery/azure-mgmt-discovery/tests/test_unit_client.py index 7c108cdd9a33..af5c5e0b6591 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_unit_client.py +++ b/sdk/discovery/azure-mgmt-discovery/tests/test_unit_client.py @@ -7,17 +7,17 @@ These tests verify client configuration without making HTTP calls. """ -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient -class TestDiscoveryClientUnit: +class TestDiscoveryMgmtClientUnit: """Unit tests for Discovery management client initialization.""" def test_client_has_expected_operations(self): """Test that client exposes expected operation groups.""" from azure.identity import DefaultAzureCredential - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="00000000-0000-0000-0000-000000000000", ) @@ -42,7 +42,7 @@ def test_client_api_version(self): """Test that client uses correct API version.""" from azure.identity import DefaultAzureCredential - client = DiscoveryClient( + client = DiscoveryMgmtClient( credential=DefaultAzureCredential(), subscription_id="00000000-0000-0000-0000-000000000000", ) diff --git a/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py b/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py index aca6287cda32..f52087572a82 100644 --- a/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py +++ 
b/sdk/discovery/azure-mgmt-discovery/tests/test_workspaces.py @@ -5,7 +5,7 @@ # ------------------------------------ """Tests for Workspaces operations.""" import pytest -from azure.mgmt.discovery import DiscoveryClient +from azure.mgmt.discovery import DiscoveryMgmtClient from devtools_testutils import recorded_by_proxy from .testcase import DiscoveryMgmtTestCase @@ -20,7 +20,7 @@ class TestWorkspaces(DiscoveryMgmtTestCase): """Tests for Workspaces operations.""" def setup_method(self, method): - self.client = self.create_discovery_client(DiscoveryClient) + self.client = self.create_discovery_client(DiscoveryMgmtClient) self.resource_group = WORKSPACE_RESOURCE_GROUP @recorded_by_proxy From 58862d7c0adc2b7b32d133eed0b9095cf0ccea43 Mon Sep 17 00:00:00 2001 From: Oluwaseyi Lawal Date: Tue, 10 Mar 2026 11:20:56 -0500 Subject: [PATCH 8/9] Move test recordings to azure-sdk-assets repo and add assets.json --- .../azure-mgmt-discovery/assets.json | 6 + ...yTestBookshelvestest_create_bookshelf.json | 70 - ...yTestBookshelvestest_delete_bookshelf.json | 32 - ...s.pyTestBookshelvestest_get_bookshelf.json | 62 - ...st_list_bookshelves_by_resource_group.json | 184 -- ...test_list_bookshelves_by_subscription.json | 264 --- ...yTestBookshelvestest_update_bookshelf.json | 72 - ...entstest_create_chat_model_deployment.json | 60 - ...entstest_delete_chat_model_deployment.json | 32 - ...oymentstest_get_chat_model_deployment.json | 52 - ...t_chat_model_deployments_by_workspace.json | 54 - ...test_list_node_pools_by_supercomputer.json | 66 - ....pyTestOperationstest_list_operations.json | 1501 ------------ ...ojectstest_list_projects_by_workspace.json | 37 - ...torageAssetstest_create_storage_asset.json | 66 - ...torageAssetstest_delete_storage_asset.json | 38 - ...stStorageAssetstest_get_storage_asset.json | 54 - ...t_storage_assets_by_storage_container.json | 64 - ...torageAssetstest_update_storage_asset.json | 66 - ...ntainerstest_create_storage_container.json | 70 - 
...ntainerstest_delete_storage_container.json | 32 - ...eContainerstest_get_storage_container.json | 56 - ..._storage_containers_by_resource_group.json | 66 - ...st_storage_containers_by_subscription.json | 85 - ...ntainerstest_update_storage_container.json | 68 - ...percomputerstest_create_supercomputer.json | 100 - ...percomputerstest_delete_supercomputer.json | 38 - ...tSupercomputerstest_get_supercomputer.json | 78 - ...list_supercomputers_by_resource_group.json | 88 - ...t_list_supercomputers_by_subscription.json | 268 --- ...est_tools.pyTestToolstest_create_tool.json | 175 -- ...est_tools.pyTestToolstest_delete_tool.json | 38 - .../test_tools.pyTestToolstest_get_tool.json | 109 - ...oolstest_list_tools_by_resource_group.json | 119 - ...tToolstest_list_tools_by_subscription.json | 2057 ----------------- ...est_tools.pyTestToolstest_update_tool.json | 121 - ...pyTestWorkspacestest_create_workspace.json | 97 - ...pyTestWorkspacestest_delete_workspace.json | 32 - ...es.pyTestWorkspacestest_get_workspace.json | 76 - ...est_list_workspaces_by_resource_group.json | 80 - ...stest_list_workspaces_by_subscription.json | 366 --- ...pyTestWorkspacestest_update_workspace.json | 84 - 42 files changed, 6 insertions(+), 7077 deletions(-) create mode 100644 sdk/discovery/azure-mgmt-discovery/assets.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_create_bookshelf.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_delete_bookshelf.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_get_bookshelf.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_resource_group.json delete mode 100644 
sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_subscription.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_update_bookshelf.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_create_chat_model_deployment.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_delete_chat_model_deployment.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_get_chat_model_deployment.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_list_chat_model_deployments_by_workspace.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_node_pools.pyTestNodePoolstest_list_node_pools_by_supercomputer.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_operations.pyTestOperationstest_list_operations.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_projects.pyTestProjectstest_list_projects_by_workspace.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_create_storage_asset.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_delete_storage_asset.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_get_storage_asset.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_list_storage_assets_by_storage_container.json delete mode 100644 
sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_update_storage_asset.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_create_storage_container.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_delete_storage_container.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_get_storage_container.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_resource_group.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_subscription.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_update_storage_container.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_create_supercomputer.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_delete_supercomputer.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_get_supercomputer.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_resource_group.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_subscription.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_create_tool.json delete mode 100644 
sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_delete_tool.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_get_tool.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_resource_group.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_subscription.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_update_tool.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_create_workspace.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_delete_workspace.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_get_workspace.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_resource_group.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_subscription.json delete mode 100644 sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_update_workspace.json diff --git a/sdk/discovery/azure-mgmt-discovery/assets.json b/sdk/discovery/azure-mgmt-discovery/assets.json new file mode 100644 index 000000000000..eb5fac668236 --- /dev/null +++ b/sdk/discovery/azure-mgmt-discovery/assets.json @@ -0,0 +1,6 @@ +{ + "AssetsRepo": "Azure/azure-sdk-assets", + "AssetsRepoPrefixPath": "python", + "TagPrefix": "python/discovery/azure-mgmt-discovery", + "Tag": "python/discovery/azure-mgmt-discovery_82e6ebe1f7" +} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_create_bookshelf.json 
b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_create_bookshelf.json deleted file mode 100644 index 4e796804c5df..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_create_bookshelf.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves/test-bookshelf-324938be?api-version=2026-02-01-preview", - "RequestMethod": "PUT", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "23", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "location": "uksouth" - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "652", - "Content-Type": "application/json; charset=utf-8", - "Date": "Mon, 02 Mar 2026 20:10:13 GMT", - "ETag": "\"79033bd8-0000-1000-0000-69a5eea40000\"", - "Expires": "-1", - "mise-correlation-id": "c804aa00-55ce-47cc-95f5-9a7dd1976588", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "61c6eb23-7954-4e7b-b0d7-0d08b7e0d40b", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/679dd090-3515-49e4-97f5-5ea1a65da394", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "800", - "x-ms-routing-request-id": "EASTUS2EUAP:20260302T201013Z:61c6eb23-7954-4e7b-b0d7-0d08b7e0d40b", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 070542FAAA494B55B4E9EC4D9D7B6B43 Ref B: 
SN4AA2022305031 Ref C: 2026-03-02T20:10:03Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:10:07.3380149Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:10:07.3380149Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-324938be-vh75b0", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-324938be.bookshelf-dev.discovery.azure.com" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_delete_bookshelf.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_delete_bookshelf.json deleted file mode 100644 index 4ff62029e064..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_delete_bookshelf.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves/test-bookshelf-9379e896?api-version=2026-02-01-preview", - "RequestMethod": "DELETE", - "RequestHeaders": { - "Accept": "*/*", - "Connection": "keep-alive", - "Content-Length": "0", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 204, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Date": "Mon, 02 Mar 2026 20:46:09 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": 
"max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "fb6d510e-bb80-45ce-a5ab-ed9841d7dbf4", - "x-ms-ratelimit-remaining-subscription-deletes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260302T204610Z:fb6d510e-bb80-45ce-a5ab-ed9841d7dbf4", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: F97C127F618E4B0084727E7935FEB279 Ref B: SN4AA2022305031 Ref C: 2026-03-02T20:46:09Z" - }, - "ResponseBody": null - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_get_bookshelf.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_get_bookshelf.json deleted file mode 100644 index 2ab0ca4d669c..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_get_bookshelf.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves/test-bookshelf-05fbc43d?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "617", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:19 GMT", - "ETag": "\"65016e0f-0000-1100-0000-69a5e2920000\"", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "0dd430d4-b434-4dfa-90df-9b60abf4a803", - 
"x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165620Z:0dd430d4-b434-4dfa-90df-9b60abf4a803", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 1601489564644BE09FE97C6EB94742BD Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:19Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:01:52.2939662Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T19:01:52.2939662Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-05fbc43d-w1nf6q", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-05fbc43d.bookshelf-dev.discovery.azure.com" - } - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_resource_group.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_resource_group.json deleted file mode 100644 index 958c0ae7acdc..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_resource_group.json +++ /dev/null @@ -1,184 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": 
"azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "3101", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:58:24 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "4d261cc9-dd97-4ec9-91bc-f08cfab70f35", - "x-ms-original-request-ids": [ - "ac4624a0-643a-478a-82b6-83e0652da0c5", - "b8e40bec-f950-450a-bca9-5799832c8e68", - "7b1fde99-c46e-4d8e-a107-b9118d09bfdd", - "7a29777c-4d4b-4083-8ef4-f88028dba681", - "db58c732-b73f-46b1-a06b-b6754775ff0a", - "26da5536-b8bc-440f-bf75-58d621cf1c56" - ], - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165824Z:4d261cc9-dd97-4ec9-91bc-f08cfab70f35", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 077581DCBB3E4E6C91B1438C9544A77F Ref B: SN4AA2022301039 Ref C: 2026-03-03T16:58:23Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:01:52.2939662Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T19:01:52.2939662Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-05fbc43d-w1nf6q", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-05fbc43d.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": 
"microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:25:18.1136078Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T19:25:18.1136078Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-0b008665-8ndxj5", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-0b008665.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:28:46.0772936Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T19:28:46.0772936Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-df5e8667-f6m0ma", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-df5e8667.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:57:00.0609603Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T19:57:00.0609603Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-64969832-l9mm7k", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": 
"Succeeded", - "bookshelfUri": "https://test-bookshelf-64969832.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:10:07.3380149Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:10:07.3380149Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-324938be-vh75b0", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-324938be.bookshelf-dev.discovery.azure.com" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_subscription.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_subscription.json deleted file mode 100644 index 05848e1713c9..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_list_bookshelves_by_subscription.json +++ /dev/null @@ -1,264 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/bookshelves?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "4849", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:55:31 
GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "5f8132ea-29c2-4f34-9b8c-cd7b9d01e43b", - "x-ms-original-request-ids": [ - "3a150c96-ed70-4d39-807f-7bea13e124a8", - "e68050ec-418a-4e8c-8f29-cc839fd05ca8", - "69a857b6-8305-46a5-9378-0a06b110e94b", - "487af5a5-7c30-4f78-b3f6-af5c9ec1cddd", - "a94c0f38-de23-417f-8c01-c0af2c593d1f", - "bc031b8c-7a13-495d-b7d3-fdd32d21a54f" - ], - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165531Z:5f8132ea-29c2-4f34-9b8c-cd7b9d01e43b", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 1763DBA97007446DA37E5CECF2F99B37 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:55:30Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2025-11-04T14:16:04.2743996Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2025-11-04T14:16:04.2743996Z" - }, - "properties": { - "provisioningState": "Succeeded", - "bookshelfUri": "https://itbshlfpsu11.bookshelf-dev.discovery.azure.com", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "managedResourceGroup": "itbshlfpsu11-mrg-pf3i44" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2026-01-15T00:20:20.0965772Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-01-15T00:20:20.0965772Z" - }, - "properties": { - "managedResourceGroup": 
"mrg-dbksf-itbshlfrp114-v9zm2i", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded", - "bookshelfUri": "https://itbshlfrp114.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-23T09:08:18.5211656Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-23T09:08:18.5211656Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-yaoswal-bookshelf-test-nxw403", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded", - "bookshelfUri": "https://yaoswal-bookshelf-test.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:01:52.2939662Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T19:01:52.2939662Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-05fbc43d-w1nf6q", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-05fbc43d.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:25:18.1136078Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - 
"lastModifiedAt": "2026-03-02T19:25:18.1136078Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-0b008665-8ndxj5", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-0b008665.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:28:46.0772936Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T19:28:46.0772936Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-df5e8667-f6m0ma", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-df5e8667.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:57:00.0609603Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T19:57:00.0609603Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-64969832-l9mm7k", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-64969832.bookshelf-dev.discovery.azure.com" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - 
"createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:10:07.3380149Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:10:07.3380149Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-324938be-vh75b0", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded", - "bookshelfUri": "https://test-bookshelf-324938be.bookshelf-dev.discovery.azure.com" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_update_bookshelf.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_update_bookshelf.json deleted file mode 100644 index fd7429b87ba1..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_bookshelves.pyTestBookshelvestest_update_bookshelf.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/bookshelves/test-bookshelf-05fbc43d?api-version=2026-02-01-preview", - "RequestMethod": "PATCH", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "46", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "tags": { - "SkipAutoDeleteTill": "2026-12-31" - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "652", - "Content-Type": "application/json; charset=utf-8", - "Date": "Thu, 05 Mar 2026 15:23:13 GMT", - "ETag": "\"17015f06-0000-1000-0000-69a99fe10000\"", - "Expires": "-1", - "mise-correlation-id": "4216893a-51d8-46da-9e42-614f457e280a", - 
"Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "7bc71812-89c2-4531-a9e1-e0d436f95505", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/c33c19cb-fc33-4d80-a73e-6a2ebf88e3aa", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260305T152313Z:7bc71812-89c2-4531-a9e1-e0d436f95505", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: F626F17645054505AA21DA12F32E0E24 Ref B: SN4AA2022303027 Ref C: 2026-03-05T15:23:09Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/bookshelves", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T19:01:52.2939662Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-05T15:23:10.8744515Z" - }, - "properties": { - "managedResourceGroup": "mrg-dbksf-test-bookshelf-05fbc43d-w1nf6q", - "bookshelfUri": "https://test-bookshelf-05fbc43d.bookshelf-dev.discovery.azure.com", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "workloadIdentities": {}, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_create_chat_model_deployment.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_create_chat_model_deployment.json deleted file mode 100644 index e5a98622a580..000000000000 --- 
a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_create_chat_model_deployment.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01/chatModelDeployments/test-deploy-chatmodel01?api-version=2026-02-01-preview", - "RequestMethod": "PUT", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)", - "Content-Type": "application/json", - "Content-Length": "80" - }, - "RequestBody": { - "location": "uksouth", - "properties": { - "modelFormat": "OpenAI", - "modelName": "gpt-4o" - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Content-Length": "625", - "Content-Type": "application/json; charset=utf-8", - "Expires": "-1", - "ETag": "\"1407f7ee-0000-1000-0000-69a8a1690000\"", - "x-ms-providerhub-traffic": "True", - "x-ms-request-id": "dd68dafc-13fb-41a4-88d6-473d7f5a55aa", - "x-ms-correlation-request-id": "ba5d9da8-d1b1-4372-9221-70a801bbb0c5", - "x-ms-ratelimit-remaining-subscription-writes": "800", - "x-ms-routing-request-id": "EASTUS2EUAP:20260304T211730Z:ba5d9da8-d1b1-4372-9221-70a801bbb0c5", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "Date": "Wed, 04 Mar 2026 21:17:30 GMT" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces/chatmodeldeployments", - "location": "uksouth", - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-04T21:17:26.2873892Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": 
"2026-03-04T21:17:26.2873892Z" - }, - "properties": { - "modelFormat": "OpenAI", - "modelName": "gpt-4o", - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_delete_chat_model_deployment.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_delete_chat_model_deployment.json deleted file mode 100644 index cfec2ed54381..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_delete_chat_model_deployment.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01/chatModelDeployments/test-deploy-chatmodel01?api-version=2026-02-01-preview", - "RequestMethod": "DELETE", - "RequestHeaders": { - "Accept": "*/*", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)", - "Content-Length": "0" - }, - "RequestBody": null, - "StatusCode": 204, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Expires": "-1", - "x-ms-providerhub-traffic": "True", - "x-ms-request-id": "e4a1b2c3-d4e5-6f7a-8b9c-0d1e2f3a4b5c", - "x-ms-correlation-request-id": "f5a6b7c8-d9e0-1f2a-3b4c-5d6e7f8a9b0c", - "x-ms-ratelimit-remaining-subscription-deletes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260304T212000Z:f5a6b7c8-d9e0-1f2a-3b4c-5d6e7f8a9b0c", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "Date": "Wed, 04 Mar 2026 21:20:00 GMT" - }, - "ResponseBody": null - } - ], - "Variables": {} -} diff --git 
a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_get_chat_model_deployment.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_get_chat_model_deployment.json deleted file mode 100644 index 6fa54aa0c108..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_get_chat_model_deployment.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01/chatModelDeployments/test-deploy-chatmodel01?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Content-Length": "625", - "Content-Type": "application/json; charset=utf-8", - "Expires": "-1", - "ETag": "\"1407f7ee-0000-1000-0000-69a8a1690000\"", - "x-ms-providerhub-traffic": "True", - "x-ms-request-id": "dd68dafc-13fb-41a4-88d6-473d7f5a55aa", - "x-ms-correlation-request-id": "ba5d9da8-d1b1-4372-9221-70a801bbb0c5", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260304T211830Z:ba5d9da8-d1b1-4372-9221-70a801bbb0c5", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "Date": "Wed, 04 Mar 2026 21:18:30 GMT" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces/chatmodeldeployments", - "location": "uksouth", - "systemData": { - 
"createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-04T21:17:26.2873892Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T21:17:26.2873892Z" - }, - "properties": { - "modelFormat": "OpenAI", - "modelName": "gpt-4o", - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_list_chat_model_deployments_by_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_list_chat_model_deployments_by_workspace.json deleted file mode 100644 index 48a8e98bacdd..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_chat_model_deployments.pyTestChatModelDeploymentstest_list_chat_model_deployments_by_workspace.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01/chatModelDeployments?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Content-Type": "application/json; charset=utf-8", - "Expires": "-1", - "x-ms-providerhub-traffic": "True", - "x-ms-request-id": "dd68dafc-13fb-41a4-88d6-473d7f5a55aa", - "x-ms-correlation-request-id": "ba5d9da8-d1b1-4372-9221-70a801bbb0c5", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260304T211730Z:ba5d9da8-d1b1-4372-9221-70a801bbb0c5", - "Strict-Transport-Security": "max-age=31536000; 
includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "Date": "Wed, 04 Mar 2026 21:17:30 GMT" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces/chatmodeldeployments", - "location": "uksouth", - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-04T21:17:26.2873892Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T21:17:26.2873892Z" - }, - "properties": { - "modelFormat": "OpenAI", - "modelName": "gpt-4o", - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_node_pools.pyTestNodePoolstest_list_node_pools_by_supercomputer.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_node_pools.pyTestNodePoolstest_list_node_pools_by_supercomputer.json deleted file mode 100644 index 3451ab14c904..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_node_pools.pyTestNodePoolstest_list_node_pools_by_supercomputer.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/rp114-rg/providers/Microsoft.Discovery/supercomputers/itsuperp114/nodePools?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "660", - "Content-Type": "application/json; charset=utf-8", - "Date": "Mon, 02 Mar 2026 15:40:44 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": 
"CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "bd085dc4-1a43-4236-9291-456d3bfcb2c3", - "x-ms-original-request-ids": [ - "f33a7c16-e0db-4d84-aa70-abc4714ac281", - "4ff34f51-c5f8-47e8-90d6-1a12550356e0", - "64037399-fb5b-4862-8624-f49768a7efb0", - "7bdacbdd-c72e-4870-a182-b5c0587c4769", - "f1a6329f-a25e-4b3d-b480-9a78287e8c57", - "21131c19-ebab-446b-a948-68fe65f132f0" - ], - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260302T154044Z:bd085dc4-1a43-4236-9291-456d3bfcb2c3", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 5263AED01E924F51895C061BE882D02D Ref B: SN4AA2022304047 Ref C: 2026-03-02T15:40:44Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers/nodepools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2026-01-15T00:33:59.5340715Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-01-15T00:33:59.5340715Z" - }, - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-nodepool", - "vmSize": "Standard_D4s_v6", - "maxNodeCount": 3, - "minNodeCount": 1, - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_operations.pyTestOperationstest_list_operations.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_operations.pyTestOperationstest_list_operations.json deleted file mode 100644 index eb2c83e549ca..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_operations.pyTestOperationstest_list_operations.json +++ /dev/null @@ -1,1501 +0,0 @@ -{ - "Entries": [ - { - 
"RequestUri": "https://Sanitized.management.azure.com/providers/Microsoft.Discovery/operations?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "29943", - "Content-Type": "application/json; charset=utf-8", - "Date": "Mon, 02 Mar 2026 15:38:40 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "eb9a66a6-5d8e-4ccc-91c8-2fe6f009614d", - "x-ms-operation-identifier": "", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-tenant-reads": "2199", - "x-ms-routing-request-id": "EASTUS2EUAP:20260302T153841Z:eb9a66a6-5d8e-4ccc-91c8-2fe6f009614d", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: A75CCC15437A42808BE41661E746D215 Ref B: SN4AA2022302037 Ref C: 2026-03-02T15:38:41Z" - }, - "ResponseBody": { - "value": [ - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "Microsoft.Discovery", - "operation": "Register the Microsoft.Discovery", - "description": "Register the subscription for Microsoft.Discovery" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "Microsoft.Discovery", - "operation": "Unregister the Microsoft.Discovery", - "description": "Unregister the subscription for Microsoft.Discovery" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "locations/operationStatuses", - "operation": 
"read_operationStatuses", - "description": "read operationStatuses" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "locations/operationStatuses", - "operation": "write_operationStatuses", - "description": "write operationStatuses" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "operations", - "operation": "read_operations", - "description": "read operations" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "checkNameAvailability", - "operation": "action_checkNameAvailability", - "description": "action checkNameAvailability" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces", - "operation": "Workspaces_ListBySubscription", - "description": "List Workspace resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces", - "operation": "Workspaces_ListByResourceGroup", - "description": "List Workspace resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces", - "operation": "Workspaces_Get", - "description": "Get a Workspace" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces", - "operation": "Workspaces_CreateOrUpdate", - "description": "Create a Workspace" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - 
"resource": "workspaces", - "operation": "Workspaces_Delete", - "description": "Delete a Workspace" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces", - "operation": "Workspaces_Update", - "description": "Update a Workspace" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "supercomputers", - "operation": "Supercomputers_ListBySubscription", - "description": "List Supercomputer resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "supercomputers", - "operation": "Supercomputers_ListByResourceGroup", - "description": "List Supercomputer resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "supercomputers", - "operation": "Supercomputers_Get", - "description": "Get a Supercomputer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "supercomputers", - "operation": "Supercomputers_CreateOrUpdate", - "description": "Create a Supercomputer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "supercomputers", - "operation": "Supercomputers_Delete", - "description": "Delete a Supercomputer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "supercomputers", - "operation": "Supercomputers_Update", - "description": "Update a Supercomputer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - 
"display": { - "provider": "Microsoft.Discovery", - "resource": "storages", - "operation": "Storages_ListBySubscription", - "description": "List Storage resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storages", - "operation": "Storages_ListByResourceGroup", - "description": "List Storage resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storages", - "operation": "Storages_Get", - "description": "Get a Storage" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storages", - "operation": "Storages_CreateOrUpdate", - "description": "Create a Storage" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storages", - "operation": "Storages_Delete", - "description": "Delete a Storage" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storages", - "operation": "Storages_Update", - "description": "Update a Storage" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "agents", - "operation": "Agents_ListBySubscription", - "description": "List Agent resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves", - "operation": "Bookshelves_ListBySubscription", - "description": "List Bookshelf resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - 
"display": { - "provider": "Microsoft.Discovery", - "resource": "dataContainers", - "operation": "DataContainers_ListBySubscription", - "description": "List DataContainer resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataAssets", - "operation": "DataAssets_ListBySubscription", - "description": "List DataAsset resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageContainers", - "operation": "StorageContainers_ListBySubscription", - "description": "List StorageContainer resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageAssets", - "operation": "StorageAssets_ListBySubscription", - "description": "List StorageAsset resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "chatModelDeployments", - "operation": "ChatModelDeployments_ListBySubscription", - "description": "List ChatModelDeployment resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "models", - "operation": "Models_ListBySubscription", - "description": "List Model resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "nodePools", - "operation": "NodePools_ListBySubscription", - "description": "List NodePool resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - 
"provider": "Microsoft.Discovery", - "resource": "tools", - "operation": "Tools_ListBySubscription", - "description": "List Tool resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workflows", - "operation": "Workflows_ListBySubscription", - "description": "List Workflow resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "projects", - "operation": "Projects_ListBySubscription", - "description": "List Project resources by subscription ID" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "agents", - "operation": "Agents_ListByResourceGroup", - "description": "List Agent resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "agents", - "operation": "Agents_Get", - "description": "Get a Agent" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "agents", - "operation": "Agents_CreateOrUpdate", - "description": "Create a Agent" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "agents", - "operation": "Agents_Update", - "description": "Update a Agent" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "agents", - "operation": "Agents_Delete", - "description": "Delete a Agent" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - 
"resource": "bookshelves", - "operation": "Bookshelves_ListByResourceGroup", - "description": "List Bookshelf resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves", - "operation": "Bookshelves_Get", - "description": "Get a Bookshelf" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves", - "operation": "Bookshelves_CreateOrUpdate", - "description": "Create a Bookshelf" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves", - "operation": "Bookshelves_Update", - "description": "Update a Bookshelf" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves", - "operation": "Bookshelves_Delete", - "description": "Delete a Bookshelf" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataContainers", - "operation": "DataContainers_ListByResourceGroup", - "description": "List DataContainer resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataContainers", - "operation": "DataContainers_Get", - "description": "Get a DataContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataContainers", - "operation": "DataContainers_CreateOrUpdate", - "description": "Create a DataContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": 
"Microsoft.Discovery", - "resource": "dataContainers", - "operation": "DataContainers_Update", - "description": "Update a DataContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataContainers", - "operation": "DataContainers_Delete", - "description": "Delete a DataContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataAssets", - "operation": "DataAssets_ListByDataContainer", - "description": "List DataAsset resources by DataContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataAssets", - "operation": "DataAssets_Get", - "description": "Get a DataAsset" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataAssets", - "operation": "DataAssets_CreateOrUpdate", - "description": "Create a DataAsset" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataAssets", - "operation": "DataAssets_Update", - "description": "Update a DataAsset" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "dataAssets", - "operation": "DataAssets_Delete", - "description": "Delete a DataAsset" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageContainers", - "operation": "StorageContainers_ListByResourceGroup", - "description": "List StorageContainer resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": 
{ - "provider": "Microsoft.Discovery", - "resource": "storageContainers", - "operation": "StorageContainers_Get", - "description": "Get a StorageContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageContainers", - "operation": "StorageContainers_CreateOrUpdate", - "description": "Create a StorageContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageContainers", - "operation": "StorageContainers_Update", - "description": "Update a StorageContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageContainers", - "operation": "StorageContainers_Delete", - "description": "Delete a StorageContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageAssets", - "operation": "StorageAssets_ListByStorageContainer", - "description": "List StorageAsset resources by StorageContainer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageAssets", - "operation": "StorageAssets_Get", - "description": "Get a StorageAsset" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageAssets", - "operation": "StorageAssets_CreateOrUpdate", - "description": "Create a StorageAsset" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageAssets", - "operation": "StorageAssets_Update", - "description": "Update a StorageAsset" - }, - "properties": null - }, - 
{ - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storageAssets", - "operation": "StorageAssets_Delete", - "description": "Delete a StorageAsset" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "chatModelDeployments", - "operation": "ChatModelDeployments_ListByWorkspace", - "description": "List ChatModelDeployment resources by Workspace" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "chatModelDeployments", - "operation": "ChatModelDeployments_Get", - "description": "Get a ChatModelDeployment" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "chatModelDeployments", - "operation": "ChatModelDeployments_CreateOrUpdate", - "description": "Create a ChatModelDeployment" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "chatModelDeployments", - "operation": "ChatModelDeployments_Update", - "description": "Update a ChatModelDeployment" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "chatModelDeployments", - "operation": "ChatModelDeployments_Delete", - "description": "Delete a ChatModelDeployment" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "models", - "operation": "Models_ListByResourceGroup", - "description": "List Model resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "models", 
- "operation": "Models_Get", - "description": "Get a Model" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "models", - "operation": "Models_CreateOrUpdate", - "description": "Create a Model" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "models", - "operation": "Models_Update", - "description": "Update a Model" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "models", - "operation": "Models_Delete", - "description": "Delete a Model" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "nodePools", - "operation": "NodePools_ListBySupercomputer", - "description": "List NodePool resources by Supercomputer" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "nodePools", - "operation": "NodePools_Get", - "description": "Get a NodePool" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "nodePools", - "operation": "NodePools_CreateOrUpdate", - "description": "Create a NodePool" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "nodePools", - "operation": "NodePools_Update", - "description": "Update a NodePool" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "nodePools", - "operation": "NodePools_Delete", - "description": "Delete a NodePool" - }, - "properties": null - }, - { - "name": 
"Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "tools", - "operation": "Tools_ListByResourceGroup", - "description": "List Tool resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "tools", - "operation": "Tools_Get", - "description": "Get a Tool" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "tools", - "operation": "Tools_CreateOrUpdate", - "description": "Create a Tool" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "tools", - "operation": "Tools_Update", - "description": "Update a Tool" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "tools", - "operation": "Tools_Delete", - "description": "Delete a Tool" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workflows", - "operation": "Workflows_ListByResourceGroup", - "description": "List Workflow resources by resource group" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workflows", - "operation": "Workflows_Get", - "description": "Get a Workflow" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workflows", - "operation": "Workflows_CreateOrUpdate", - "description": "Create a Workflow" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": 
"workflows", - "operation": "Workflows_Update", - "description": "Update a Workflow" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workflows", - "operation": "Workflows_Delete", - "description": "Delete a Workflow" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "projects", - "operation": "Projects_ListByWorkspace", - "description": "List Project resources by Workspace" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "projects", - "operation": "Projects_Get", - "description": "Get a Project" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "projects", - "operation": "Projects_CreateOrUpdate", - "description": "Create a Project" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "projects", - "operation": "Projects_Update", - "description": "Update a Project" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "projects", - "operation": "Projects_Delete", - "description": "Delete a Project" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "tools", - "operation": "Tools_Run", - "description": "Run the specified tool in the context of the specified project." 
- }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgebases", - "operation": "KnowledgeBases_ListKnowledgeBases", - "description": "List KnowledgeBase resources" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgebases", - "operation": "KnowledgeBases_GetOperationStatus", - "description": "Get status of a KnowledgeBase LRO (create/delete)." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgeBaseVersions", - "operation": "KnowledgeBaseVersions_GetOperationStatus", - "description": "Get status of a KnowledgeBaseVersion LRO (create/delete)." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgebases", - "operation": "KnowledgeBases_DeleteKnowledgeBase", - "description": "Delete a KnowledgeBase." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgeBaseVersions", - "operation": "KnowledgeBaseVersions_ListKnowledgeBaseVersions", - "description": "List KnowledgeBaseVersion resources" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgeBaseVersions", - "operation": "KnowledgeBaseVersions_DeleteKnowledgeBaseVersion", - "description": "Delete a KnowledgeBaseVersion." 
- }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgeBaseVersions", - "operation": "KnowledgeBaseVersions_CreateOrUpdateKnowledgeBaseVersion", - "description": "Creates or updates a KnowledgeBaseVersion." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgeBaseVersions", - "operation": "KnowledgeBaseVersions_StartIndexing", - "description": "Start indexing." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgeBaseVersions", - "operation": "KnowledgeBaseVersions_StopIndexing", - "description": "Stop indexing." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "conversations", - "operation": "Conversations_ListConversations", - "description": "List Conversation resources" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "knowledgeBaseVersions", - "operation": "KnowledgeBaseVersions_Search", - "description": "Search the knowledge base." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "conversations", - "operation": "Conversations_CreateConversation", - "description": "Creates a Conversation." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "conversations", - "operation": "Conversations_DeleteConversation", - "description": "Deletes a conversation." 
- }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "investigations", - "operation": "Investigations_ListInvestigations", - "description": "List Investigation resources" - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "investigations", - "operation": "Investigations_CreateOrUpdateInvestigation", - "description": "Creates or updates Investigation." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "investigations", - "operation": "Investigations_DeleteInvestigation", - "description": "Delete a Investigation." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "supercomputers/nodepools", - "operation": "Tools_Run", - "description": "Run the specified tool on a supercomputers/nodepool resource." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": true, - "display": { - "provider": "Microsoft.Discovery", - "resource": "storages", - "operation": "Storages_Mount", - "description": "Mount a storage resource on a supercomputers/nodepool." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateEndpointConnectionProxies", - "operation": "Bookshelves_GetPrivateEndpointConnectionProxy", - "description": "Get a private endpoint connection proxy on a bookshelf." 
- }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateEndpointConnectionProxies", - "operation": "Bookshelves_WritePrivateEndpointConnectionProxy", - "description": "Create a private endpoint connection proxy on a bookshelf." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateEndpointConnectionProxies", - "operation": "Bookshelves_DeletePrivateEndpointConnectionProxy", - "description": "Delete a private endpoint connection proxy on a bookshelf." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateEndpointConnectionProxies", - "operation": "Bookshelves_ValidatePrivateEndpointConnectionProxy", - "description": "Validate a private endpoint connection proxy on a bookshelf." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateEndpointConnections", - "operation": "Bookshelves_GetPrivateEndpointConnection", - "description": "Get a private endpoint connection on a bookshelf." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateEndpointConnections", - "operation": "Bookshelves_WritePrivateEndpointConnection", - "description": "Create a private endpoint connection on a bookshelf." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateEndpointConnections", - "operation": "Bookshelves_DeletePrivateEndpointConnection", - "description": "Delete a private endpoint connection on a bookshelf." 
- }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateLinkResources", - "operation": "Bookshelves_GetPrivateLinkResource", - "description": "Get a private link resource on a bookshelf." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "bookshelves/privateEndpointConnectionsApproval", - "operation": "Bookshelves_PrivateEndpointConnectionsApproval", - "description": "Approve a private endpoint connection on a bookshelf." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateEndpointConnectionProxies", - "operation": "Workspaces_GetPrivateEndpointConnectionProxy", - "description": "Get a private endpoint connection proxy on a workspace." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateEndpointConnectionProxies", - "operation": "Workspaces_WritePrivateEndpointConnectionProxy", - "description": "Create a private endpoint connection proxy on a workspace." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateEndpointConnectionProxies", - "operation": "Workspaces_DeletePrivateEndpointConnectionProxy", - "description": "Delete a private endpoint connection proxy on a workspace." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateEndpointConnectionProxies", - "operation": "Workspaces_ValidatePrivateEndpointConnectionProxy", - "description": "Validate a private endpoint connection proxy on a workspace." 
- }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateEndpointConnections", - "operation": "Workspaces_GetPrivateEndpointConnection", - "description": "Get a private endpoint connection on a workspace." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateEndpointConnections", - "operation": "Workspaces_WritePrivateEndpointConnection", - "description": "Create a private endpoint connection on a workspace." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateEndpointConnections", - "operation": "Workspaces_DeletePrivateEndpointConnection", - "description": "Delete a private endpoint connection on a workspace." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateLinkResources", - "operation": "Workspaces_GetPrivateLinkResource", - "description": "Get a private link resource on a workspace." - }, - "properties": null - }, - { - "name": "Sanitized", - "isDataAction": false, - "display": { - "provider": "Microsoft.Discovery", - "resource": "workspaces/privateEndpointConnectionsApproval", - "operation": "Workspaces_PrivateEndpointConnectionsApproval", - "description": "Approve a private endpoint connection on a workspace." 
- }, - "properties": null - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_projects.pyTestProjectstest_list_projects_by_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_projects.pyTestProjectstest_list_projects_by_workspace.json deleted file mode 100644 index 79ff65c7efc2..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_projects.pyTestProjectstest_list_projects_by_workspace.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/newapiversiontest/providers/Microsoft.Discovery/workspaces/wrksptest44/projects?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "12", - "Content-Type": "application/json; charset=utf-8", - "Date": "Wed, 04 Feb 2026 15:48:27 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "421c8562-02dc-41b2-8bcc-322cc8e5a2ca", - "x-ms-original-request-ids": "9166d87a-188d-42a9-933f-c49f0df916f4", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260204T154827Z:421c8562-02dc-41b2-8bcc-322cc8e5a2ca", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: E6307FB1B53D4C72B57E51521239D7B9 Ref B: SN4AA2022302025 Ref C: 2026-02-04T15:48:26Z" - }, - "ResponseBody": { - "value": [] - } - } - ], - "Variables": {} -} diff --git 
a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_create_storage_asset.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_create_storage_asset.json deleted file mode 100644 index ce3e636c5c3e..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_create_storage_asset.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets/test-sa-482ad005?api-version=2026-02-01-preview", - "RequestMethod": "PUT", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "123", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "location": "uksouth", - "properties": { - "description": "Test storage asset for SDK validation", - "path": "data/test-assets" - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "495", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:01:02 GMT", - "ETag": "\"4b00ed4c-0000-1000-0000-69a705be0000\"", - "Expires": "-1", - "mise-correlation-id": "fbf0ad2c-ff68-428d-be23-20542cd9901d", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "05c0319e-9cfa-411b-8fce-38da30a7bfa4", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/b5bfbdbd-e30b-48a9-805b-701197b4ce9b", - 
"x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "800", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T160102Z:05c0319e-9cfa-411b-8fce-38da30a7bfa4", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: F2D3F321CE4940B7B653F68DB46AEEBE Ref B: SN4AA2022304021 Ref C: 2026-03-03T16:00:55Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/storagecontainers/storageassets", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-03T16:00:59.1416626Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-03T16:00:59.1416626Z" - }, - "properties": { - "description": "Test storage asset for SDK validation", - "path": "data/test-assets", - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_delete_storage_asset.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_delete_storage_asset.json deleted file mode 100644 index 8f6625216d5b..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_delete_storage_asset.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets/test-sa-482ad005?api-version=2026-02-01-preview", - "RequestMethod": "DELETE", - "RequestHeaders": { - "Accept": "*/*", - "Connection": "keep-alive", - "Content-Length": "0", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 204, - 
"ResponseHeaders": { - "Cache-Control": "no-cache", - "Date": "Thu, 05 Mar 2026 15:34:19 GMT", - "ETag": "\"3501b5f8-0000-1000-0000-69a9a27a0000\"", - "Expires": "-1", - "mise-correlation-id": "b0c6342d-bf79-446f-894d-ac3b4806be64", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "b8d9d8d0-ae99-4b15-8cfe-50ca2812ae4b", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/9f03118b-de92-4c9a-b2cf-d7584f292739", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-deletes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260305T153419Z:b8d9d8d0-ae99-4b15-8cfe-50ca2812ae4b", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: C83969BB50A74F2880C72E7ED8B0F5B7 Ref B: SN4AA2022305039 Ref C: 2026-03-05T15:34:15Z" - }, - "ResponseBody": null - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_get_storage_asset.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_get_storage_asset.json deleted file mode 100644 index 1e1012da8028..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_get_storage_asset.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets/test-sa-482ad005?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - 
"User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "466", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:57:22 GMT", - "ETag": "\"23005205-0000-1100-0000-69a706050000\"", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "c4989ccd-5e10-47f9-8f7b-992c408bd003", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165723Z:c4989ccd-5e10-47f9-8f7b-992c408bd003", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: C0CD5C9470934E908BAFBE47532CB194 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:57:22Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/storagecontainers/storageassets", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-03T16:00:59.1416626Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-03T16:00:59.1416626Z" - }, - "properties": { - "description": "Test storage asset for SDK validation", - "path": "data/test-assets", - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_list_storage_assets_by_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_list_storage_assets_by_storage_container.json deleted file mode 100644 index ce5687b8d5ec..000000000000 --- 
a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_list_storage_assets_by_storage_container.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "478", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:57:13 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "38247173-cf4f-4f5a-afa0-f852a86405fc", - "x-ms-original-request-ids": [ - "1d647d30-33b9-439b-a2f0-2d58b7ac8faa", - "d1345bdb-60e4-46f5-815f-771fd96775b0", - "3caf5592-135b-49ad-8d34-e6a2688e69db", - "12fc2d33-a863-4941-8b59-c180e83b533b", - "c1debb6a-c718-42d7-9cb8-8672380d56e0", - "d366118e-57c6-4362-9e49-5c691c1ab793" - ], - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165714Z:38247173-cf4f-4f5a-afa0-f852a86405fc", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 9B840026E4E3499CB21D8C0C80CD856B Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:57:13Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/storagecontainers/storageassets", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": 
"2026-03-03T16:00:59.1416626Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-03T16:00:59.1416626Z" - }, - "properties": { - "description": "Test storage asset for SDK validation", - "path": "data/test-assets", - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_update_storage_asset.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_update_storage_asset.json deleted file mode 100644 index 285f99d81c8a..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_assets.pyTestStorageAssetstest_update_storage_asset.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a/storageAssets/test-sa-482ad005?api-version=2026-02-01-preview", - "RequestMethod": "PATCH", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "46", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "tags": { - "SkipAutoDeleteTill": "2026-12-31" - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "529", - "Content-Type": "application/json; charset=utf-8", - "Date": "Thu, 05 Mar 2026 15:24:42 GMT", - "ETag": "\"34010cee-0000-1000-0000-69a9a03a0000\"", - "Expires": "-1", - "mise-correlation-id": "20278466-5aac-4316-8254-681410f23aa2", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - 
"x-ms-build-version": "1", - "x-ms-correlation-request-id": "1ffbcbfa-2b29-421e-9bf1-f6d7e8fc2d3f", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/63f61a44-0d5f-47de-b453-09118032ee47", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260305T152442Z:1ffbcbfa-2b29-421e-9bf1-f6d7e8fc2d3f", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 06A1444688884787918F4527029B6586 Ref B: SN4AA2022303027 Ref C: 2026-03-05T15:24:39Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/storagecontainers/storageassets", - "location": "uksouth", - "tags": { - "SkipAutoDeleteTill": "2026-12-31" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-03T16:00:59.1416626Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-05T15:24:40.3080887Z" - }, - "properties": { - "description": "Test storage asset for SDK validation", - "path": "data/test-assets", - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_create_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_create_storage_container.json deleted file mode 100644 index 1ac232150ee1..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_create_storage_container.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": 
"https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a?api-version=2026-02-01-preview", - "RequestMethod": "PUT", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "235", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "location": "uksouth", - "properties": { - "storageStore": { - "kind": "AzureStorageBlob", - "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr" - } - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "593", - "Content-Type": "application/json; charset=utf-8", - "Date": "Mon, 02 Mar 2026 20:45:52 GMT", - "ETag": "\"9101ee8a-0000-1000-0000-69a5f7000000\"", - "Expires": "-1", - "mise-correlation-id": "b83707e3-b44e-4ce3-b628-47b7c5fc7b8b", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "a97e7fdb-968f-4c4a-8988-5373a62a8cb5", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/7affb2b8-0fa0-4fcd-9228-6f4380dc12d9", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260302T204553Z:a97e7fdb-968f-4c4a-8988-5373a62a8cb5", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 16838EA751A546F7BF37A5F6CA3A82AB Ref B: SN4AA2022305031 Ref C: 2026-03-02T20:45:47Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": 
"microsoft.discovery/storagecontainers", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:45:51.2603686Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:45:51.2603686Z" - }, - "properties": { - "storageStore": { - "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", - "kind": "AzureStorageBlob" - }, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_delete_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_delete_storage_container.json deleted file mode 100644 index 97d788d2d519..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_delete_storage_container.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a?api-version=2026-02-01-preview", - "RequestMethod": "DELETE", - "RequestHeaders": { - "Accept": "*/*", - "Connection": "keep-alive", - "Content-Length": "0", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 204, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Date": "Thu, 05 Mar 2026 16:16:36 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": 
"6aa0ee30-075f-4371-bcbe-fc7024a876b3", - "x-ms-ratelimit-remaining-subscription-deletes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260305T161637Z:6aa0ee30-075f-4371-bcbe-fc7024a876b3", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 56E0240202D740298DE1A112D472CA14 Ref B: SN4AA2022302047 Ref C: 2026-03-05T16:16:36Z" - }, - "ResponseBody": null - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_get_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_get_storage_container.json deleted file mode 100644 index a924dba9f86b..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_get_storage_container.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "563", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:49 GMT", - "ETag": "\"00005e1a-0000-1100-0000-69a5f73d0000\"", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "5e8c3116-f76d-4cac-8dac-bf5e8272b7ec", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - 
"x-ms-routing-request-id": "EASTUS2EUAP:20260303T165650Z:5e8c3116-f76d-4cac-8dac-bf5e8272b7ec", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 7E4F06471CD7419BB8D9DFD12883ABE1 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:49Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/storagecontainers", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:45:51.2603686Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:45:51.2603686Z" - }, - "properties": { - "storageStore": { - "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", - "kind": "AzureStorageBlob" - }, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_resource_group.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_resource_group.json deleted file mode 100644 index b25187edf727..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_resource_group.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - 
"ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "575", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:39 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "f96ec5fc-d361-42a0-bb50-accab8495cfb", - "x-ms-original-request-ids": [ - "b0589582-e88a-43e9-ad13-7a12bf8e251b", - "bb0f09fb-174b-4d75-a2b4-a9048503dd80", - "d9822582-d6de-475e-aa18-e7a9a7bfcbdd", - "23f70422-6ec4-467b-8c6b-00a3b54ed23b", - "19068a6f-a31c-4396-b8b2-ccda25a6b2d7", - "1d518613-bcef-4957-bb52-ec4f7ec84ff8" - ], - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165640Z:f96ec5fc-d361-42a0-bb50-accab8495cfb", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 1E1FC7A1B1BE43BE9359DA1BFD41998C Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:39Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/storagecontainers", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:45:51.2603686Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:45:51.2603686Z" - }, - "properties": { - "storageStore": { - "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", - "kind": "AzureStorageBlob" - }, - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_subscription.json 
b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_subscription.json deleted file mode 100644 index a7faed18513d..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_list_storage_containers_by_subscription.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/storageContainers?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "1070", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:44 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "1d3b7639-709d-410f-a99e-347bfe6e2520", - "x-ms-original-request-ids": [ - "128a456b-21d4-480d-98ba-faf629e50062", - "02a69c46-a2bb-41dd-a69f-78b51e06c6b6", - "342bb941-e37d-4b38-b604-ad4758ef1a49", - "3df070ce-6ec2-4c6c-9b01-fc9121d9776a", - "d3c3125f-f407-4c4d-9f09-b676a2f8bba2", - "9675f956-e7b1-42d8-9e1f-2fcf6df9e339" - ], - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165645Z:1d3b7639-709d-410f-a99e-347bfe6e2520", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 46B242B0E81942C498579F3A2309AF54 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:44Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": 
"microsoft.discovery/storagecontainers", - "location": "uksouth", - "tags": {}, - "systemData": { - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-03-02T20:56:51.8425901Z" - }, - "properties": { - "storageStore": { - "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/deray-private-test/providers/Microsoft.Storage/storageAccounts/derayprstg", - "kind": "AzureStorageBlob" - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/storagecontainers", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:45:51.2603686Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:45:51.2603686Z" - }, - "properties": { - "storageStore": { - "storageAccountId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", - "kind": "AzureStorageBlob" - }, - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_update_storage_container.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_update_storage_container.json deleted file mode 100644 index 8b6317328228..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_storage_containers.pyTestStorageContainerstest_update_storage_container.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/storageContainers/test-sc-8bef0d1a?api-version=2026-02-01-preview", - "RequestMethod": "PATCH", - 
"RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "46", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "tags": { - "SkipAutoDeleteTill": "2026-12-31" - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "627", - "Content-Type": "application/json; charset=utf-8", - "Date": "Thu, 05 Mar 2026 15:32:33 GMT", - "ETag": "\"02003231-0000-1000-0000-69a9a2110000\"", - "Expires": "-1", - "mise-correlation-id": "a5e9029e-b294-4d20-94b5-94bab37c28e2", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "7de6c9bb-1ac1-45e3-86dd-57473dc24f56", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/58fb1db6-4957-4392-a237-0d7fec9758b7", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260305T153234Z:7de6c9bb-1ac1-45e3-86dd-57473dc24f56", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 6B12E9230F404F4ABABF5B16A8BE3566 Ref B: SN4AA2022304051 Ref C: 2026-03-05T15:32:31Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/storagecontainers", - "location": "uksouth", - "tags": { - "SkipAutoDeleteTill": "2026-12-31" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:45:51.2603686Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-05T15:32:32.3368841Z" - }, - "properties": { - "storageStore": { - "storageAccountId": 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Storage/storageAccounts/mytststr", - "kind": "AzureStorageBlob" - }, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_create_supercomputer.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_create_supercomputer.json deleted file mode 100644 index f23d2ddd74da..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_create_supercomputer.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/supercomputers/test-sc-2bbb25b8?api-version=2026-02-01-preview", - "RequestMethod": "PUT", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "453", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "location": "uksouth", - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", - "identities": { - "clusterIdentity": { - "id": "Sanitized" - }, - "kubeletIdentity": { - "id": "Sanitized" - }, - "workloadIdentities": { - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": {} - } - } - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "1243", - "Content-Type": "application/json; charset=utf-8", - "Date": "Mon, 02 Mar 2026 20:26:41 GMT", - 
"ETag": "\"e101d9d9-0000-1000-0000-69a5f2820000\"", - "Expires": "-1", - "mise-correlation-id": "4f7d7d75-d760-4ec1-b640-15b0fbcf3b07", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "b1a9b1bb-311b-40b3-aed7-78037ba0d1ad", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/e2195b76-5489-4fe6-a942-8ed47002bea9", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260302T202642Z:b1a9b1bb-311b-40b3-aed7-78037ba0d1ad", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 6C2AD6EA55DE440EA450F96E026D12CE Ref B: SN4AA2022305031 Ref C: 2026-03-02T20:26:35Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers", - "location": "uksouth", - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:26:38.4028447Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:26:38.4028447Z" - }, - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", - "identities": { - "clusterIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "kubeletIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "workloadIdentities": { - 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": { - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - } - } - }, - "systemSku": "Standard_D4s_v6", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_delete_supercomputer.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_delete_supercomputer.json deleted file mode 100644 index c642ecddca09..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_delete_supercomputer.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/supercomputers/test-sc-2bbb25b8?api-version=2026-02-01-preview", - "RequestMethod": "DELETE", - "RequestHeaders": { - "Accept": "*/*", - "Connection": "keep-alive", - "Content-Length": "0", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 204, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Date": "Thu, 05 Mar 2026 16:04:52 GMT", - "ETag": "\"3e0096f1-0000-1000-0000-69a9a9a40000\"", - "Expires": "-1", - "mise-correlation-id": "c7152a84-2565-418f-8a89-fe8f6008a10e", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": 
"182e92e9-49a4-4087-ae35-b131b971ee7a", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/d42a3ac5-0953-407c-912d-d31b077ba1e3", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-deletes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260305T160453Z:182e92e9-49a4-4087-ae35-b131b971ee7a", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 28FDC45C02654AF386BE3B1358304C4A Ref B: SN4AA2022302047 Ref C: 2026-03-05T16:04:50Z" - }, - "ResponseBody": null - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_get_supercomputer.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_get_supercomputer.json deleted file mode 100644 index 3a1520d6ac1d..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_get_supercomputer.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/supercomputers/test-sc-2bbb25b8?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "1198", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:34 GMT", - "ETag": "\"f80198c3-0000-1000-0000-69a61bb00000\"", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - 
"X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "00bb6c3e-6d7f-44d5-bef2-eb9500417db4", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165635Z:00bb6c3e-6d7f-44d5-bef2-eb9500417db4", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 1243CEAFB57F4CD4B783D528FA0097C2 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:34Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers", - "location": "uksouth", - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:26:38.4028447Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-03-02T23:22:24.8691796Z" - }, - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", - "identities": { - "clusterIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "kubeletIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "workloadIdentities": { - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": { - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - } - } - }, - "systemSku": "Standard_D4s_v6", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} diff --git 
a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_resource_group.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_resource_group.json deleted file mode 100644 index 3f4d62f7938d..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_resource_group.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/supercomputers?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "1210", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:25 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "86ed8968-dd64-46a4-80d6-7af7a10d76b6", - "x-ms-original-request-ids": [ - "18f7d244-a82f-45e2-aebe-c34308c2aa3b", - "33b0a420-8c88-4600-8794-330571b2fe8b", - "34317fa1-86b7-44c2-924c-e676d5065983", - "602166b4-ebdb-4ee3-8313-ad710669ba36", - "207ed322-ff3a-4d35-97d1-14b87829966e", - "ae5581c7-b238-4efc-8ae1-db1e76740a11" - ], - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165626Z:86ed8968-dd64-46a4-80d6-7af7a10d76b6", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 85DB5D24A851438DB933E355650980E0 Ref B: DM2AA1091212011 
Ref C: 2026-03-03T16:56:25Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers", - "location": "uksouth", - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:26:38.4028447Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-03-02T23:22:24.8691796Z" - }, - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", - "identities": { - "clusterIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "kubeletIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "workloadIdentities": { - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": { - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - } - } - }, - "systemSku": "Standard_D4s_v6", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_subscription.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_subscription.json deleted file mode 100644 index 669bf09dabf2..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_supercomputers.pyTestSupercomputerstest_list_supercomputers_by_subscription.json +++ /dev/null @@ -1,268 +0,0 @@ -{ - 
"Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/supercomputers?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "6214", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:30 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "0d5fe431-a3be-4211-b90f-a28a9b4ecffe", - "x-ms-original-request-ids": [ - "9dcdcbe5-1551-4c9d-86f6-35ef8fd9f25a", - "78ac76bb-9727-4bef-ba20-606c17274eae", - "0117e727-04ef-438c-bf02-cfeda583b7d3", - "f83716ec-8acb-4279-8cc4-e6d1d131f2c1", - "5d21fb26-951c-413c-b380-d28ebe5914fc", - "329a4075-e041-4d8e-94d2-b2d7dfbec5b6" - ], - "x-ms-ratelimit-remaining-subscription-reads": "1098", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165631Z:0d5fe431-a3be-4211-b90f-a28a9b4ecffe", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: D2A5DAC3072943CAB3E8220C9816B26E Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:31Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers", - "location": "uksouth", - "tags": { - "createdBy": "Sanitized" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2025-11-18T14:01:46.9205594Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-19T13:40:29.0395037Z" - }, - "properties": { - "subnetId": 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-aks", - "identities": { - "clusterIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "kubeletIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "workloadIdentities": { - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/fixedrg-dev-uksouth1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dev-uksouth1-uami": { - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - } - } - }, - "systemSku": "Standard_D4s_v5", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Failed" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers", - "location": "uksouth", - "tags": { - "createdBy": "Sanitized" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2026-01-15T00:18:20.4616551Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-01-15T00:34:40.7398302Z" - }, - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-aks", - "identities": { - "clusterIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "kubeletIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - 
"workloadIdentities": { - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/fixedrg-dev-uksouth1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dev-uksouth1-uami": { - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - } - } - }, - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers", - "location": "uksouth", - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-19T14:27:41.4552807Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-19T16:36:06.8831442Z" - }, - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-aks", - "identities": { - "clusterIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "kubeletIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "workloadIdentities": { - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/fixedrg-dev-uksouth1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dev-uksouth1-uami": { - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - } - } - }, - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Failed" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers", - "location": "uksouth", - "systemData": { - "createdBy": 
"Sanitized", - "createdByType": "User", - "createdAt": "2026-02-19T14:28:47.9432207Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-19T16:23:40.0680035Z" - }, - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/supercomputer-aks", - "identities": { - "clusterIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "kubeletIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "workloadIdentities": { - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/fixedrg-dev-uksouth1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dev-uksouth1-uami": { - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - } - } - }, - "systemSku": "Standard_B4ms", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Failed" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/supercomputers", - "location": "uksouth", - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:26:38.4028447Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-03-02T23:22:24.8691796Z" - }, - "properties": { - "subnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", - "identities": { - "clusterIdentity": { - "id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "kubeletIdentity": { - 
"id": "Sanitized", - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - }, - "workloadIdentities": { - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity": { - "principalId": "00000000-0000-0000-0000-000000000000", - "clientId": "00000000-0000-0000-0000-000000000000" - } - } - }, - "systemSku": "Standard_D4s_v6", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_create_tool.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_create_tool.json deleted file mode 100644 index f09dc440833e..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_create_tool.json +++ /dev/null @@ -1,175 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools/test-tool-50d87c62?api-version=2026-02-01-preview", - "RequestMethod": "PUT", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "959", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "location": "uksouth", - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - 
"compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings.", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - } - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "1399", - "Content-Type": "application/json; charset=utf-8", - "Date": "Mon, 02 Mar 2026 21:02:34 GMT", - "ETag": "\"5202f689-0000-1000-0000-69a5faea0000\"", - "Expires": "-1", - "mise-correlation-id": "df95acd4-2805-47d7-935e-f90b98a20e12", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "6c02fc28-d2f7-42a0-9d7f-75e2865ca4ae", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/129656c1-1656-49c7-a842-a7dbeb530c5f", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260302T210235Z:6c02fc28-d2f7-42a0-9d7f-75e2865ca4ae", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 9C4A47B80DDC43CBA5F21AC72A0BAFBC Ref B: SN4AA2022302033 Ref C: 2026-03-02T21:02:29Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": 
"microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T21:02:33.801961Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T21:02:33.801961Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings.", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_delete_tool.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_delete_tool.json deleted file mode 100644 index c7acd113ab93..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_delete_tool.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools/test-tool-50d87c62?api-version=2026-02-01-preview", - "RequestMethod": "DELETE", - "RequestHeaders": { - "Accept": "*/*", - "Connection": "keep-alive", - "Content-Length": "0", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 204, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Date": "Thu, 05 Mar 2026 16:25:45 GMT", - "ETag": "\"57005f4d-0000-1000-0000-69a9ae8a0000\"", - "Expires": "-1", - "mise-correlation-id": "658a0cb8-2437-4086-b630-88d78341fe20", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "2578b064-b489-4a83-bf36-8ad37814c1c6", - "x-ms-operation-identifier": "tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/45996932-0745-44bc-99d5-292a2f7de8c5", - "x-ms-providerhub-traffic": 
"True", - "x-ms-ratelimit-remaining-subscription-deletes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260305T162546Z:2578b064-b489-4a83-bf36-8ad37814c1c6", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 2F889D57B7CF4F6F9161EEBE4786734A Ref B: SN4AA2022302047 Ref C: 2026-03-05T16:25:43Z" - }, - "ResponseBody": null - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_get_tool.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_get_tool.json deleted file mode 100644 index f5b87812fbf1..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_get_tool.json +++ /dev/null @@ -1,109 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools/test-tool-50d87c62?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "1306", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:57:05 GMT", - "ETag": "\"b6006ee1-0000-1100-0000-69a5fb340000\"", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "67238903-bbd5-459d-a4e1-9449565cf851", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165706Z:67238903-bbd5-459d-a4e1-9449565cf851", - "x-ms-throttling-version": "v2", - 
"X-MSEdge-Ref": "Ref A: 982886DE539D41C0AB2851506AEDD7B5 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:57:05Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T21:02:33.801961Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T21:02:33.801961Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings.", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_resource_group.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_resource_group.json deleted file mode 100644 index 3b3abe5414e7..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_resource_group.json +++ /dev/null @@ -1,119 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "1318", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:59 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "541d9c0a-94d2-4844-a123-fb5b0e0c8666", - "x-ms-original-request-ids": [ - "d41b656b-4cb2-42e6-a1e9-2ff9d5e481bd", - "e16a30c3-93f0-4ea4-8c00-083db7bc4f48", - "69ee1886-677c-4c20-9732-964b577cfc99", - "bdcc1e83-f278-4236-aa41-b726b3bd6ba0", - "923061dc-ba8c-4eed-9065-70a54f7d2352", - "2fdfc08b-6123-41f3-a070-8f60fedbf59e" - ], - 
"x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165700Z:541d9c0a-94d2-4844-a123-fb5b0e0c8666", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 9D5B89D7B96544C797A3CB22F206348C Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:59Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T21:02:33.801961Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T21:02:33.801961Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings.", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_subscription.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_subscription.json deleted file mode 100644 index 1949b5c8b5e9..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_list_tools_by_subscription.json +++ /dev/null @@ -1,2057 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/tools?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "44417", - "Content-Type": "application/json; charset=utf-8", - "Date": "Tue, 03 Mar 2026 16:56:53 GMT", - "Expires": "-1", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "ba5c423f-1bea-443b-80d8-fdd88ef15ecc", - "x-ms-original-request-ids": [ - "998e5771-29cf-416c-b142-1107d9b060c8", - "e638e0b5-9012-4b7c-b6fc-0280a449588b", - "0a891fd2-5bda-4025-b342-e6559e275da9", - "b337c21e-20cd-4c86-8234-b867ee2d78ca", - "4e3b42cd-b3c4-4893-b469-709b992130fc", - "3cd9edb1-c696-4314-b0df-27a5ce829793" - ], - 
"x-ms-ratelimit-remaining-subscription-reads": "1098", - "x-ms-routing-request-id": "EASTUS2EUAP:20260303T165654Z:ba5c423f-1bea-443b-80d8-fdd88ef15ecc", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 89580D6FD89E4356A110CA6E19D8E3D9 Ref B: DM2AA1091212011 Ref C: 2026-03-03T16:56:54Z" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2025-09-16T11:09:11.8986542Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2025-09-16T11:18:07.8437828Z" - }, - "properties": { - "environmentVariables": { - "LOGP_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-logp", - "BOILING_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-bp", - "CRITICAL_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-tc", - "DENSITY_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-density", - "SOLUBILITY_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-solubility" - }, - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction 
for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\"\".", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2026-01-15T00:40:15.3174601Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-01-15T00:49:16.0537055Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\"\".", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": { - "LOGP_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-logp", - "BOILING_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-bp", - "CRITICAL_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-tc", - "DENSITY_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-density", - "SOLUBILITY_ENDPOINT_ID": "/subscriptions/8bd6cf1f-7ca2-4b66-8ec3-3a7620027b80/resourceGroups/alzam-westus-gpu/providers/Microsoft.MachineLearningServices/workspaces/alzam-westus-gpu-ws/onlineEndpoints/molpredictor-solubility" - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2026-01-15T00:40:39.3470173Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-01-15T00:48:18.2325096Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This is the coding tool for CorePython cheminformatics operations.", - "version": "1.0.0", - "category": 
"cheminformatics", - "license": "MIT", - "infra": [ - { - "infra_type": "container", - "name": "Sanitized", - "image": { - "acr": "demodiscoveryacr.azurecr.io/corepython:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "2Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "4Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "code_environments": [ - { - "language": "python", - "command": "python3 \"/{{ scriptName }}\"", - "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2026-01-15T00:41:02.0050284Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-01-15T00:48:19.5365998Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This tool is used to test inline files and output mount configurations.", - "version": "1.0.0", - "category": "testing", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/testiotool:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - 
"ram": "2Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Test action to validate inline file input and output mount functionality.", - "input_schema": { - "type": "object", - "properties": { - "test_inline_file": { - "type": "string", - "description": "This is a test inline file, you should provide some dummy text" - } - }, - "required": [ - "test_inline_file" - ] - }, - "command": "python test_io_script.py", - "inline_files": [ - { - "mount_path": "/app/input/test_file.txt", - "content": "{{{ test_inline_file }}}" - } - ], - "output_mount_configurations": [ - { - "mount_path": "/app/outputs/", - "auto_promote": true, - "output_name": "testiooutput", - "output_description": "Test output files generated from processing the inline input file" - } - ], - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-12T20:02:50.7457408Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-02-12T20:03:17.4513262Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - 
"Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\".", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-12T20:02:50.8238918Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-02-12T20:03:53.4456326Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This is the coding tool for CorePython cheminformatics operations.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "infra_type": "container", - "name": "Sanitized", - "image": { - "acr": "demodiscoveryacr.azurecr.io/corepython:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "2Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "4Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "code_environments": [ - { - "language": 
"python", - "command": "python3 \"/{{ scriptName }}\"", - "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-13T17:13:37.8488789Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-02-13T17:14:12.587767Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This is the coding tool for CorePython cheminformatics operations.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "infra_type": "container", - "name": "Sanitized", - "image": { - "acr": "demodiscoveryacr.azurecr.io/corepython:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "2Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "4Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "code_environments": [ - { - "language": "python", - "command": "python3 \"/{{ scriptName }}\"", - "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. 
DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-13T17:13:38.3175742Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-02-13T17:13:58.3227152Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\".", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-14T01:12:40.1293186Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-02-14T01:13:00.4649916Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This is the coding tool for CorePython cheminformatics operations.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "infra_type": "container", - "name": "Sanitized", - "image": { - "acr": "demodiscoveryacr.azurecr.io/corepython:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "2Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "4Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "code_environments": [ - { - "language": "python", - "command": "python3 \"/{{ scriptName }}\"", - "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. 
Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-14T01:12:40.4886867Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-02-14T01:13:14.7006469Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings read from .txt files in a directory. Note, input mount path where the SMILES data will be stored for the computation is \"/app/input_smiles\". Multiple smiles may be processed within the same run as long as they are mounted to the same input path. Outputs will be stored in \"/app/outputs\".", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-17T13:36:57.2052466Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-17T13:36:57.2052466Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "MOO (Multi-Parameter Optimization) Tool provides antibody engineering capabilities for sequence validation, developability prediction, liability analysis, and optimization. It validates antibody sequences (VHH and conventional IgG), predicts developability metrics like expression and stability, analyzes manufacturability liabilities, and performs multi-parameter optimization to generate improved variants. Results include detailed scores, mutations, and recommendations for antibody design.", - "version": "1.0.0", - "category": "Scientific Computing", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "acrantibodydev.azurecr.io/moo-client-csv:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "4Gi", - "storage": "8Gi", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "8Gi", - "storage": "32Gi", - "gpu": "0" - }, - "infiniband": false, - "recommended_sku": [ - "Standard_D4s_v3" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Validate antibody sequences and return parsed data. 
Supports both VHH (single domain) and conventional IgG (heavy+light chain) formats. Returns validation status and parsed sequences with sequence lengths.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string", - "description": "(Optional) Path to a CSV file inside the container to build sequences from (e.g., /app/inputs/sequences.csv)." - }, - "csvLimit": { - "type": "integer", - "description": "(Optional) Only process the first N rows of the CSV." - }, - "outputDir": { - "type": "string", - "description": "(Optional) Output directory inside the container for annotated CSV outputs (default: ./output)." - }, - "outputFile": { - "type": "string", - "description": "(Optional) Output filename (or absolute path) for annotated CSV outputs." - }, - "sequenceName": { - "type": "string", - "description": "Name for the sequence (e.g., \"my-vhh\", \"my-igg\")." - }, - "format": { - "type": "string", - "enum": [ - "vhh", - "conventional" - ], - "description": "Sequence format: 'vhh' for single domain antibodies or 'conventional' for IgG with heavy and light chains." - }, - "vhhSequence": { - "type": "string", - "description": "VHH sequence string (required when format is 'vhh')." - }, - "heavyChain": { - "type": "string", - "description": "Heavy chain sequence (required when format is 'conventional')." - }, - "lightChain": { - "type": "string", - "description": "Light chain sequence (required when format is 'conventional')." 
- } - }, - "required": [ - "format" - ] - }, - "command": "python main.py -a validate{{#if sequenceName}} -n '{{sequenceName}}'{{/if}} -f '{{format}}'{{#if csvFile}} --csv-file '{{csvFile}}'{{/if}}{{#if csvLimit}} --csv-limit {{csvLimit}}{{/if}}{{#if outputDir}} --output-dir '{{outputDir}}'{{/if}}{{#if outputFile}} --output-file '{{outputFile}}'{{/if}}{{#if vhhSequence}} --vhh-sequence '{{vhhSequence}}'{{/if}}{{#if heavyChain}} --heavy-chain '{{heavyChain}}'{{/if}}{{#if lightChain}} --light-chain '{{lightChain}}'{{/if}}", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Predict sequence developability metrics including expression, HIC retention, ECM, and BV.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string", - "description": "Path to a CSV file inside the container to build sequences from." - }, - "format": { - "type": "string", - "description": "Sequence format: 'vhh' or 'conventional'." - } - }, - "required": [ - "format", - "csvFile" - ] - }, - "command": "python main.py -a developability -f '{{format}}' --csv-file '{{csvFile}}'", - "infra_node": "worker", - "output_mount_configurations": [ - { - "mount_path": "/app/outputs/", - "auto_promote": true, - "output_name": "DevelopabilityResults", - "output_description": "The results of developability predictions including expression, HIC retention, ECM, and BV metrics." 
- } - ] - }, - { - "name": "Sanitized", - "description": "Analyze sequences for potential manufacturability liabilities such as oxidation sites, aggregation risks, and structural issues.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string" - }, - "csvLimit": { - "type": "integer" - }, - "outputDir": { - "type": "string" - }, - "outputFile": { - "type": "string" - }, - "liabilitiesSeqColumn": { - "type": "string" - }, - "sequence": { - "type": "string" - }, - "chain": { - "type": "string", - "enum": [ - "vhh", - "heavy", - "light" - ] - }, - "includeStructural": { - "type": "boolean" - }, - "scheme": { - "type": "string" - }, - "filterGermline": { - "type": "boolean" - }, - "regionScope": { - "type": "string", - "enum": [ - "ALL", - "CDR" - ] - } - }, - "required": [ - "chain" - ] - }, - "command": "python main.py -a liabilities -c '{{chain}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Submit sequences for multi-parameter optimization and return a job ID.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string" - }, - "csvLimit": { - "type": "integer" - }, - "sequenceName": { - "type": "string" - }, - "format": { - "type": "string", - "enum": [ - "vhh", - "conventional" - ] - }, - "vhhSequence": { - "type": "string" - }, - "heavyChain": { - "type": "string" - }, - "lightChain": { - "type": "string" - }, - "strategy": { - "type": "string", - "enum": [ - "aggressive", - "balanced", - "conservative" - ] - }, - "maxVariants": { - "type": "integer" - }, - "mutationDepth": { - "type": "integer" - }, - "excludeRegions": { - "type": "string" - }, - "includeOnlyRegions": { - "type": "string" - } - }, - "required": [ - "format" - ] - }, - "command": "python main.py -a optimize -f '{{format}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Check the status of an optimization job.", - "input_schema": { - "type": "object", - "properties": 
{ - "jobId": { - "type": "string", - "description": "Job identifier string from optimization submission." - } - }, - "required": [ - "jobId" - ] - }, - "command": "python main.py -a status --job-id '{{jobId}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Retrieve results for a completed optimization job.", - "input_schema": { - "type": "object", - "properties": { - "jobId": { - "type": "string", - "description": "Job identifier string from optimization submission." - } - }, - "required": [ - "jobId" - ] - }, - "command": "python main.py -a results --job-id '{{jobId}}'", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-17T13:44:17.1956309Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-17T13:44:17.1956309Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "MOO (Multi-Parameter Optimization) Tool provides antibody engineering capabilities for sequence validation, developability prediction, liability analysis, and optimization. It validates antibody sequences (VHH and conventional IgG), predicts developability metrics like expression and stability, analyzes manufacturability liabilities, and performs multi-parameter optimization to generate improved variants. 
Results include detailed scores, mutations, and recommendations for antibody design.", - "version": "1.0.0", - "category": "Scientific Computing", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "acrantibodydev.azurecr.io/moo-client-csv:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "4Gi", - "storage": "8Gi", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "8Gi", - "storage": "32Gi", - "gpu": "0" - }, - "infiniband": false, - "recommended_sku": [ - "Standard_D4s_v3" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Validate antibody sequences and return parsed data. Supports both VHH (single domain) and conventional IgG (heavy+light chain) formats. Returns validation status and parsed sequences with sequence lengths.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string", - "description": "(Optional) Path to a CSV file inside the container to build sequences from (e.g., /app/inputs/sequences.csv)." - }, - "csvLimit": { - "type": "integer", - "description": "(Optional) Only process the first N rows of the CSV." - }, - "outputDir": { - "type": "string", - "description": "(Optional) Output directory inside the container for annotated CSV outputs (default: ./output)." - }, - "outputFile": { - "type": "string", - "description": "(Optional) Output filename (or absolute path) for annotated CSV outputs." - }, - "sequenceName": { - "type": "string", - "description": "Name for the sequence (e.g., \"my-vhh\", \"my-igg\")." - }, - "format": { - "type": "string", - "enum": [ - "vhh", - "conventional" - ], - "description": "Sequence format: 'vhh' for single domain antibodies or 'conventional' for IgG with heavy and light chains." - }, - "vhhSequence": { - "type": "string", - "description": "VHH sequence string (required when format is 'vhh')." 
- }, - "heavyChain": { - "type": "string", - "description": "Heavy chain sequence (required when format is 'conventional')." - }, - "lightChain": { - "type": "string", - "description": "Light chain sequence (required when format is 'conventional')." - } - }, - "required": [ - "format" - ] - }, - "command": "python main.py -a validate{{#if sequenceName}} -n '{{sequenceName}}'{{/if}} -f '{{format}}'{{#if csvFile}} --csv-file '{{csvFile}}'{{/if}}{{#if csvLimit}} --csv-limit {{csvLimit}}{{/if}}{{#if outputDir}} --output-dir '{{outputDir}}'{{/if}}{{#if outputFile}} --output-file '{{outputFile}}'{{/if}}{{#if vhhSequence}} --vhh-sequence '{{vhhSequence}}'{{/if}}{{#if heavyChain}} --heavy-chain '{{heavyChain}}'{{/if}}{{#if lightChain}} --light-chain '{{lightChain}}'{{/if}}", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Predict sequence developability metrics including expression, HIC retention, ECM, and BV.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string", - "description": "Path to a CSV file inside the container to build sequences from." - }, - "format": { - "type": "string", - "description": "Sequence format: 'vhh' or 'conventional'." - } - }, - "required": [ - "format", - "csvFile" - ] - }, - "command": "python main.py -a developability -f '{{format}}' --csv-file '{{csvFile}}'", - "infra_node": "worker", - "output_mount_configurations": [ - { - "mount_path": "/app/outputs/", - "auto_promote": true, - "output_name": "DevelopabilityResults", - "output_description": "The results of developability predictions including expression, HIC retention, ECM, and BV metrics." 
- } - ] - }, - { - "name": "Sanitized", - "description": "Analyze sequences for potential manufacturability liabilities such as oxidation sites, aggregation risks, and structural issues.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string" - }, - "csvLimit": { - "type": "integer" - }, - "outputDir": { - "type": "string" - }, - "outputFile": { - "type": "string" - }, - "liabilitiesSeqColumn": { - "type": "string" - }, - "sequence": { - "type": "string" - }, - "chain": { - "type": "string", - "enum": [ - "vhh", - "heavy", - "light" - ] - }, - "includeStructural": { - "type": "boolean" - }, - "scheme": { - "type": "string" - }, - "filterGermline": { - "type": "boolean" - }, - "regionScope": { - "type": "string", - "enum": [ - "ALL", - "CDR" - ] - } - }, - "required": [ - "chain" - ] - }, - "command": "python main.py -a liabilities -c '{{chain}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Submit sequences for multi-parameter optimization and return a job ID.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string" - }, - "csvLimit": { - "type": "integer" - }, - "sequenceName": { - "type": "string" - }, - "format": { - "type": "string", - "enum": [ - "vhh", - "conventional" - ] - }, - "vhhSequence": { - "type": "string" - }, - "heavyChain": { - "type": "string" - }, - "lightChain": { - "type": "string" - }, - "strategy": { - "type": "string", - "enum": [ - "aggressive", - "balanced", - "conservative" - ] - }, - "maxVariants": { - "type": "integer" - }, - "mutationDepth": { - "type": "integer" - }, - "excludeRegions": { - "type": "string" - }, - "includeOnlyRegions": { - "type": "string" - } - }, - "required": [ - "format" - ] - }, - "command": "python main.py -a optimize -f '{{format}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Check the status of an optimization job.", - "input_schema": { - "type": "object", - "properties": 
{ - "jobId": { - "type": "string", - "description": "Job identifier string from optimization submission." - } - }, - "required": [ - "jobId" - ] - }, - "command": "python main.py -a status --job-id '{{jobId}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Retrieve results for a completed optimization job.", - "input_schema": { - "type": "object", - "properties": { - "jobId": { - "type": "string", - "description": "Job identifier string from optimization submission." - } - }, - "required": [ - "jobId" - ] - }, - "command": "python main.py -a results --job-id '{{jobId}}'", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-17T13:50:19.5978763Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-17T13:50:19.5978763Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "MOO (Multi-Parameter Optimization) Tool provides antibody engineering capabilities for sequence validation, developability prediction, liability analysis, and optimization. It validates antibody sequences (VHH and conventional IgG), predicts developability metrics like expression and stability, analyzes manufacturability liabilities, and performs multi-parameter optimization to generate improved variants. 
Results include detailed scores, mutations, and recommendations for antibody design.", - "version": "1.0.0", - "category": "Scientific Computing", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "acrantibodydev.azurecr.io/moo-client-csv:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "4Gi", - "storage": "8Gi", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "8Gi", - "storage": "32Gi", - "gpu": "0" - }, - "infiniband": false, - "recommended_sku": [ - "Standard_D4s_v3" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Validate antibody sequences and return parsed data. Supports both VHH (single domain) and conventional IgG (heavy+light chain) formats. Returns validation status and parsed sequences with sequence lengths.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string", - "description": "(Optional) Path to a CSV file inside the container to build sequences from (e.g., /app/inputs/sequences.csv)." - }, - "csvLimit": { - "type": "integer", - "description": "(Optional) Only process the first N rows of the CSV." - }, - "outputDir": { - "type": "string", - "description": "(Optional) Output directory inside the container for annotated CSV outputs (default: ./output)." - }, - "outputFile": { - "type": "string", - "description": "(Optional) Output filename (or absolute path) for annotated CSV outputs." - }, - "sequenceName": { - "type": "string", - "description": "Name for the sequence (e.g., \"my-vhh\", \"my-igg\")." - }, - "format": { - "type": "string", - "enum": [ - "vhh", - "conventional" - ], - "description": "Sequence format: 'vhh' for single domain antibodies or 'conventional' for IgG with heavy and light chains." - }, - "vhhSequence": { - "type": "string", - "description": "VHH sequence string (required when format is 'vhh')." 
- }, - "heavyChain": { - "type": "string", - "description": "Heavy chain sequence (required when format is 'conventional')." - }, - "lightChain": { - "type": "string", - "description": "Light chain sequence (required when format is 'conventional')." - } - }, - "required": [ - "format" - ] - }, - "command": "python main.py -a validate{{#if sequenceName}} -n '{{sequenceName}}'{{/if}} -f '{{format}}'{{#if csvFile}} --csv-file '{{csvFile}}'{{/if}}{{#if csvLimit}} --csv-limit {{csvLimit}}{{/if}}{{#if outputDir}} --output-dir '{{outputDir}}'{{/if}}{{#if outputFile}} --output-file '{{outputFile}}'{{/if}}{{#if vhhSequence}} --vhh-sequence '{{vhhSequence}}'{{/if}}{{#if heavyChain}} --heavy-chain '{{heavyChain}}'{{/if}}{{#if lightChain}} --light-chain '{{lightChain}}'{{/if}}", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Predict sequence developability metrics including expression, HIC retention, ECM, and BV.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string", - "description": "Path to a CSV file inside the container to build sequences from." - }, - "format": { - "type": "string", - "description": "Sequence format: 'vhh' or 'conventional'." - } - }, - "required": [ - "format", - "csvFile" - ] - }, - "command": "python main.py -a developability -f '{{format}}' --csv-file '{{csvFile}}'", - "infra_node": "worker", - "output_mount_configurations": [ - { - "mount_path": "/app/outputs/", - "auto_promote": true, - "output_name": "DevelopabilityResults", - "output_description": "The results of developability predictions including expression, HIC retention, ECM, and BV metrics." 
- } - ] - }, - { - "name": "Sanitized", - "description": "Analyze sequences for potential manufacturability liabilities such as oxidation sites, aggregation risks, and structural issues.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string" - }, - "csvLimit": { - "type": "integer" - }, - "outputDir": { - "type": "string" - }, - "outputFile": { - "type": "string" - }, - "liabilitiesSeqColumn": { - "type": "string" - }, - "sequence": { - "type": "string" - }, - "chain": { - "type": "string", - "enum": [ - "vhh", - "heavy", - "light" - ] - }, - "includeStructural": { - "type": "boolean" - }, - "scheme": { - "type": "string" - }, - "filterGermline": { - "type": "boolean" - }, - "regionScope": { - "type": "string", - "enum": [ - "ALL", - "CDR" - ] - } - }, - "required": [ - "chain" - ] - }, - "command": "python main.py -a liabilities -c '{{chain}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Submit sequences for multi-parameter optimization and return a job ID.", - "input_schema": { - "type": "object", - "properties": { - "csvFile": { - "type": "string" - }, - "csvLimit": { - "type": "integer" - }, - "sequenceName": { - "type": "string" - }, - "format": { - "type": "string", - "enum": [ - "vhh", - "conventional" - ] - }, - "vhhSequence": { - "type": "string" - }, - "heavyChain": { - "type": "string" - }, - "lightChain": { - "type": "string" - }, - "strategy": { - "type": "string", - "enum": [ - "aggressive", - "balanced", - "conservative" - ] - }, - "maxVariants": { - "type": "integer" - }, - "mutationDepth": { - "type": "integer" - }, - "excludeRegions": { - "type": "string" - }, - "includeOnlyRegions": { - "type": "string" - } - }, - "required": [ - "format" - ] - }, - "command": "python main.py -a optimize -f '{{format}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Check the status of an optimization job.", - "input_schema": { - "type": "object", - "properties": 
{ - "jobId": { - "type": "string", - "description": "Job identifier string from optimization submission." - } - }, - "required": [ - "jobId" - ] - }, - "command": "python main.py -a status --job-id '{{jobId}}'", - "infra_node": "worker" - }, - { - "name": "Sanitized", - "description": "Retrieve results for a completed optimization job.", - "input_schema": { - "type": "object", - "properties": { - "jobId": { - "type": "string", - "description": "Job identifier string from optimization submission." - } - }, - "required": [ - "jobId" - ] - }, - "command": "python main.py -a results --job-id '{{jobId}}'", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-26T02:02:25.3606852Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-26T02:02:25.3606852Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This is the coding tool for CorePython cheminformatics operations.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "infra_type": "container", - "name": "Sanitized", - "image": { - "acr": "demodiscoveryacr.azurecr.io/corepython:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "2Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "4Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "code_environments": [ - { - "language": "python", - "command": "python3 \"/{{ scriptName }}\"", - "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as 
general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T21:02:33.801961Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T21:02:33.801961Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings.", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "eastus2euap", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-27T21:13:22.6154699Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-27T21:13:22.6154699Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This is the coding tool for CorePython cheminformatics operations.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "infra_type": "container", - "name": "Sanitized", - "image": { - "acr": "demodiscoveryacr.azurecr.io/corepython:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "2Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "4Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "code_environments": [ - { - "language": "python", - "command": "python3 \"/{{ scriptName }}\"", - "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. 
Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "eastus2", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2025-08-22T17:39:43.5552623Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2025-08-22T18:10:26.5363936Z" - }, - "properties": { - "environmentVariables": {}, - "version": "0.0.1", - "definitionContent": { - "name": "Sanitized", - "version": "1.0", - "infra": [ - { - "name": "Sanitized", - "image": { - "acr": "acrbslftestprod.azurecr.io/bookshelf-indexing-service:3902098" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "4Gi", - "gpu": 0 - }, - "max_resources": { - "cpu": "4", - "ram": "8Gi", - "gpu": 0 - } - } - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "eastus2", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2025-08-22T17:42:21.2183803Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2025-08-22T17:42:21.2183803Z" - }, - "properties": { - "environmentVariables": {}, - "version": "0.0.1", - "definitionContent": { - "name": "Sanitized", - "version": "1.0", - "infra": [ - { - "name": "Sanitized", - "image": { - "acr": "acrbslftestprod.azurecr.io/bookshelf-indexing-service-canary:3908498" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "4Gi", - "gpu": 0 - }, - "max_resources": { - "cpu": "4", - "ram": "8Gi", - "gpu": 0 - } - } - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": 
"microsoft.discovery/tools", - "location": "eastus", - "tags": {}, - "systemData": { - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-26T02:07:20.087512Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This is the coding tool for CorePython cheminformatics operations.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "infra_type": "container", - "name": "Sanitized", - "image": { - "acr": "demodiscoveryacr.azurecr.io/corepython:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "2Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "4Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "code_environments": [ - { - "language": "python", - "command": "python3 \"/{{ scriptName }}\"", - "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. 
Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "eastus", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-27T21:16:19.3888378Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-27T21:16:19.3888378Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "This is the coding tool for CorePython cheminformatics operations.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "infra_type": "container", - "name": "Sanitized", - "image": { - "acr": "demodiscoveryacr.azurecr.io/corepython:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "2Gi", - "gpu": "0", - "storage": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "4Gi", - "storage": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "code_environments": [ - { - "language": "python", - "command": "python3 \"/{{ scriptName }}\"", - "description": "Python code environment with RDKit for molecular manipulation, cheminformatics, and property calculations, as well as general Python code execution. Note: All file outputs should be written to directory /app/outputs. DO NOT FORGET THIS. DO NOT WRITE OUTPUTS TO ANY OTHER DIRECTORY. 
Utilize the outputMounts parameter to make sure this is mounted correctly and captured after script execution.", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_update_tool.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_update_tool.json deleted file mode 100644 index 787bfa358bc8..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_tools.pyTestToolstest_update_tool.json +++ /dev/null @@ -1,121 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/tools/test-tool-50d87c62?api-version=2026-02-01-preview", - "RequestMethod": "PATCH", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "Content-Length": "46", - "Content-Type": "application/json", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": { - "tags": { - "SkipAutoDeleteTill": "2026-12-31" - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Content-Length": "1433", - "Content-Type": "application/json; charset=utf-8", - "Date": "Thu, 05 Mar 2026 15:29:58 GMT", - "ETag": "\"530098c4-0000-1000-0000-69a9a1760000\"", - "Expires": "-1", - "mise-correlation-id": "065b16fe-308e-4d0b-98a5-91dd76d0fe5b", - "Pragma": "no-cache", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "x-azure-ref": "Sanitized", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-build-version": "1", - "x-ms-correlation-request-id": "8030a5e8-13b0-4b17-8825-0c57884f221b", - "x-ms-operation-identifier": 
"tenantId=00000000-0000-0000-0000-000000000000,objectId=00000000-0000-0000-0000-000000000000/eastus2euap/b7944970-25d4-42ad-a7b8-25a01a24a21e", - "x-ms-providerhub-traffic": "True", - "x-ms-ratelimit-remaining-subscription-writes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260305T152958Z:8030a5e8-13b0-4b17-8825-0c57884f221b", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 2FC92E67AAD94952BE7DD9011315FF1D Ref B: SN4AA2022303047 Ref C: 2026-03-05T15:29:55Z" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/tools", - "location": "uksouth", - "tags": { - "SkipAutoDeleteTill": "2026-12-31" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T21:02:33.801961Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-05T15:29:56.920121Z" - }, - "properties": { - "version": "1.0.0", - "definitionContent": { - "name": "Sanitized", - "description": "Molecular property prediction for single SMILES strings.", - "version": "1.0.0", - "category": "cheminformatics", - "license": "MIT", - "infra": [ - { - "name": "Sanitized", - "infra_type": "container", - "image": { - "acr": "demodiscoveryacr.azurecr.io/molpredictor:latest" - }, - "compute": { - "min_resources": { - "cpu": "1", - "ram": "1Gi", - "storage": "32", - "gpu": "0" - }, - "max_resources": { - "cpu": "2", - "ram": "1Gi", - "storage": "64", - "gpu": "0" - }, - "recommended_sku": [ - "Standard_D4s_v6" - ], - "pool_type": "static", - "pool_size": 1 - } - } - ], - "actions": [ - { - "name": "Sanitized", - "description": "Predict molecular properties for SMILES strings.", - "input_schema": { - "type": "object", - "properties": { - "action": { - "type": "string", - "description": "The property to predict. 
Must be one of [log_p, boiling_point, solubility, density, critical_point]" - } - }, - "required": [ - "action" - ] - }, - "command": "python molpredictor.py --action {{ action }}", - "infra_node": "worker" - } - ] - }, - "environmentVariables": {}, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_create_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_create_workspace.json deleted file mode 100644 index 6bfbe9e5df2f..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_create_workspace.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01?api-version=2026-02-01-preview", - "RequestMethod": "PUT", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)", - "Content-Type": "application/json", - "Content-Length": "1121" - }, - "RequestBody": { - "location": "uksouth", - "properties": { - "supercomputerIds": [], - "workspaceIdentity": { - "id": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourcegroups/olawal/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity" - }, - "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default3", - "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default", - "workspaceSubnetId": 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Network/virtualNetworks/newapiv/subnets/default2", - "customerManagedKeys": "Enabled", - "keyVaultProperties": { - "keyName": "discoverykey", - "keyVaultUri": "https://newapik.vault.azure.net/", - "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" - }, - "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", - "publicNetworkAccess": "Disabled" - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Content-Length": "1606", - "Content-Type": "application/json; charset=utf-8", - "Expires": "-1", - "ETag": "\"9a00d931-0000-1000-0000-69a88a160000\"", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-providerhub-traffic": "True", - "x-ms-request-id": "c4763dfb-9418-41c4-b9f8-8a3882a37254", - "x-ms-correlation-request-id": "653482e8-5fd8-4f48-acb5-01f2284cb594", - "x-ms-ratelimit-remaining-subscription-global-reads": "16499", - "x-ms-routing-request-id": "UKSOUTH:20260304T193912Z:653482e8-5fd8-4f48-acb5-01f2284cb594", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "X-MSEdge-Ref": "Ref A: CFE2594B44234894ABDC403EA188F3A8 Ref B: SN4AA2022302031 Ref C: 2026-03-04T19:39:12Z", - "Date": "Wed, 04 Mar 2026 19:39:11 GMT" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-04T19:36:15.7991238Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T19:36:15.7991238Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", - "supercomputerIds": [], - 
"workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", - "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" - }, - "customerManagedKeys": "Enabled", - "keyVaultProperties": { - "keyVaultUri": "https://newapik.vault.azure.net/", - "keyName": "discoverykey", - "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" - }, - "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_delete_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_delete_workspace.json deleted file mode 100644 index 362dbba2ff64..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_delete_workspace.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-397d51cf?api-version=2026-02-01-preview", - "RequestMethod": "DELETE", - "RequestHeaders": { - "Accept": "*/*", - "Connection": "keep-alive", - "Content-Length": "0", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 204, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Date": "Wed, 04 Mar 2026 19:20:57 GMT", - "Expires": "-1", - "Pragma": "no-cache", 
- "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Cache": "CONFIG_NOCACHE", - "X-Content-Type-Options": "nosniff", - "x-ms-correlation-request-id": "0ed3acad-a11e-4466-95c3-e80274ea0724", - "x-ms-ratelimit-remaining-subscription-deletes": "799", - "x-ms-routing-request-id": "EASTUS2EUAP:20260304T192058Z:0ed3acad-a11e-4466-95c3-e80274ea0724", - "x-ms-throttling-version": "v2", - "X-MSEdge-Ref": "Ref A: 822D8F24B73C45BEA4BC6953384D07AE Ref B: SN4AA2022302035 Ref C: 2026-03-04T19:20:55Z" - }, - "ResponseBody": null - } - ], - "Variables": {} -} diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_get_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_get_workspace.json deleted file mode 100644 index ec8b131f353f..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_get_workspace.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Content-Length": "1607", - "Content-Type": "application/json; charset=utf-8", - "Expires": "-1", - "ETag": "\"0502323c-0000-1100-0000-69a8934a0000\"", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-providerhub-traffic": "True", - "x-ms-request-id": "1dcda016-4a3d-4ada-bdea-69e76ff62001", - "x-ms-correlation-request-id": "3539e8a3-d7d6-4658-843f-6162ac25641e", - 
"x-ms-ratelimit-remaining-subscription-global-reads": "16499", - "x-ms-routing-request-id": "UKSOUTH:20260304T203849Z:3539e8a3-d7d6-4658-843f-6162ac25641e", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "X-MSEdge-Ref": "Ref A: CF625DDD8A3B415C94D9D49490F08B77 Ref B: SN4AA2022305029 Ref C: 2026-03-04T20:38:49Z", - "Date": "Wed, 04 Mar 2026 20:38:49 GMT" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-04T19:36:15.7991238Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T19:36:15.7991238Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", - "supercomputerIds": [], - "workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", - "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" - }, - "customerManagedKeys": "Enabled", - "keyVaultProperties": { - "keyVaultUri": "https://newapik.vault.azure.net/", - "keyName": "discoverykey", - "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" - }, - "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_resource_group.json 
b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_resource_group.json deleted file mode 100644 index 874819ed9d62..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_resource_group.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Content-Length": "1619", - "Content-Type": "application/json; charset=utf-8", - "Expires": "-1", - "x-ms-original-request-ids": "37bbdb9b-8704-4d62-bceb-8a6d5c04eace", - "x-ms-providerhub-traffic": "True", - "x-ms-correlation-request-id": "994eee78-4cf2-4c9f-abbb-d28d558c65a1", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-ratelimit-remaining-subscription-global-reads": "16499", - "x-ms-request-id": "994eee78-4cf2-4c9f-abbb-d28d558c65a1", - "x-ms-routing-request-id": "UKSOUTH:20260304T203847Z:994eee78-4cf2-4c9f-abbb-d28d558c65a1", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "X-MSEdge-Ref": "Ref A: A8F8C59D0E8F4F1BA565F48F60749B0B Ref B: SN4AA2022303045 Ref C: 2026-03-04T20:38:46Z", - "Date": "Wed, 04 Mar 2026 20:38:46 GMT" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": 
"User", - "createdAt": "2026-03-04T19:36:15.7991238Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T19:36:15.7991238Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", - "supercomputerIds": [], - "workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", - "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" - }, - "customerManagedKeys": "Enabled", - "keyVaultProperties": { - "keyVaultUri": "https://newapik.vault.azure.net/", - "keyName": "discoverykey", - "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" - }, - "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - } - ] - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_subscription.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_subscription.json deleted file mode 100644 index 8e683a63e70e..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_list_workspaces_by_subscription.json +++ /dev/null @@ -1,366 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/providers/Microsoft.Discovery/workspaces?api-version=2026-02-01-preview", - "RequestMethod": "GET", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", 
- "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)" - }, - "RequestBody": null, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Content-Length": "14176", - "Content-Type": "application/json; charset=utf-8", - "Expires": "-1", - "x-ms-original-request-ids": "e8db88e0-ee84-4bab-b52b-2192290fd359, 8ef79aee-3c20-4ded-a660-a37e3e5a3d8a, ee2f1d3d-51bf-4352-a382-280a8e3630e0", - "x-ms-ratelimit-remaining-subscription-reads": "1099", - "x-ms-ratelimit-remaining-subscription-global-reads": "16499", - "x-ms-request-id": "06af92d8-dc60-485d-8488-dcb9636118aa", - "x-ms-correlation-request-id": "06af92d8-dc60-485d-8488-dcb9636118aa", - "x-ms-routing-request-id": "SOUTHCENTRALUS:20260304T203845Z:06af92d8-dc60-485d-8488-dcb9636118aa", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "X-MSEdge-Ref": "Ref A: 6E4DD90850C44594B7ACC6D7947D373F Ref B: SN4AA2022301017 Ref C: 2026-03-04T20:38:44Z", - "Date": "Wed, 04 Mar 2026 20:38:44 GMT" - }, - "ResponseBody": { - "value": [ - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": { - "WorkspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "Application", - "createdAt": "2026-01-15T00:34:10.0773703Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "Application", - "lastModifiedAt": "2026-01-15T00:34:10.0773703Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-itworkrp114-4bncru", - "supercomputerIds": [ - "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/rp114-rg/providers/Microsoft.Discovery/supercomputers/itsuperp114" - ], - 
"workspaceApiUri": "https://itworkrp114.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/itworkrp114", - "workspaceIdentity": { - "id": "Sanitized" - }, - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uk south", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-19T05:22:08.6568882Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-19T05:22:08.6568882Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-wrksptest53-9ke163", - "supercomputerIds": [], - "workspaceApiUri": "https://wrksptest53.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wrksptest53", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "ed0545f1-159d-4851-9d51-38cace7712d7", - "clientId": "1764701a-3d97-4612-9134-bec5e4b4ae0e" - }, - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": { - "SkipAssociateKeyVaultToNsp": "true" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-25T13:40:46.4913501Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-25T13:40:46.4913501Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-wksp25021-9r5ibw", - "supercomputerIds": [], - "workspaceApiUri": "https://wksp25021.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wksp25021", - "workspaceIdentity": { - 
"id": "Sanitized", - "principalId": "76442198-a696-4763-a8da-c63b99d25643", - "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" - }, - "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/agentSubnet", - "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/PESubnet", - "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": { - "SkipAssociateKeyVaultToNsp": "true" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-02T20:01:56.834174Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-02T20:01:56.834174Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-wksp03031-2788mh", - "supercomputerIds": [], - "workspaceApiUri": "https://wksp03031.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wksp03031", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "76442198-a696-4763-a8da-c63b99d25643", - "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" - }, - "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/agentSubnet8", - "privateEndpointSubnetId": 
"/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/PESubnet2", - "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": { - "SkipAssociateKeyVaultToNsp": "true" - }, - "systemData": { - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T11:23:08.6609326Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-wksp04031-gaebre", - "supercomputerIds": [], - "workspaceApiUri": "https://wksp04031.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wksp04031", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "76442198-a696-4763-a8da-c63b99d25643", - "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" - }, - "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/agentSubnet8", - "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/PESubnet2", - "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": 
"Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": { - "SkipAssociateKeyVaultToNsp": "true", - "networkIsolation": "true" - }, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-04T15:12:31.9121216Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T15:12:31.9121216Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-wksp04033-bs476h", - "supercomputerIds": [], - "workspaceApiUri": "https://wksp04033.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wksp04033", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "76442198-a696-4763-a8da-c63b99d25643", - "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" - }, - "agentSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/agentSubnet9", - "privateEndpointSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/PESubnet2", - "workspaceSubnetId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/fixedrg-dev-uksouth1/providers/Microsoft.Network/virtualNetworks/vnet-dev-uksouth1/subnets/copilot-subnet", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Failed" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-04T19:36:15.7991238Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T19:36:15.7991238Z" - }, - "properties": { - 
"managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", - "supercomputerIds": [], - "workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", - "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" - }, - "customerManagedKeys": "Enabled", - "keyVaultProperties": { - "keyVaultUri": "https://newapik.vault.azure.net/", - "keyName": "discoverykey", - "keyVersion": "2c9db3cf55d247b4a1c1831fbbdad906" - }, - "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "centraluseuap", - "tags": {}, - "systemData": { - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-02-04T09:19:20.1363722Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-wrksptest45-4rxpj0", - "supercomputerIds": [], - "workspaceApiUri": "https://wrksptest45.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/wrksptest45", - "workspaceIdentity": { - "id": "Sanitized" - }, - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Succeeded" - } - }, - { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "eastus", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-02-26T00:24:42.4610232Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - 
"lastModifiedAt": "2026-02-26T00:24:42.4610232Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-testwseusalch-69oxho", - "supercomputerIds": [], - "workspaceApiUri": "https://testwseusalch.workspace.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/testwseusalch", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "76442198-a696-4763-a8da-c63b99d25643", - "clientId": "3cec3bd3-5f2a-4158-b60b-264f105008ab" - }, - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - "provisioningState": "Failed" - } - } - ] - } - } - ], - "Variables": {} -} \ No newline at end of file diff --git a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_update_workspace.json b/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_update_workspace.json deleted file mode 100644 index 9fc4744b029a..000000000000 --- a/sdk/discovery/azure-mgmt-discovery/tests/recordings/test_workspaces.pyTestWorkspacestest_update_workspace.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "Entries": [ - { - "RequestUri": "https://Sanitized.management.azure.com/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.Discovery/workspaces/test-wrksp-create01?api-version=2026-02-01-preview", - "RequestMethod": "PATCH", - "RequestHeaders": { - "Accept": "application/json", - "Connection": "keep-alive", - "User-Agent": "azsdk-python-mgmt-discovery/1.0.0b1 Python/3.11.9 (Windows-10-10.0.26200-SP0)", - "Content-Type": "application/json", - "Content-Length": "105" - }, - "RequestBody": { - "properties": { - "keyVaultProperties": { - "keyName": "discoverykey", - "keyVersion": "956de2fc802f49eba81ddcc348ebc27c" - } - } - }, - "StatusCode": 200, - "ResponseHeaders": { - "Cache-Control": "no-cache", - "Pragma": "no-cache", - "Content-Length": "1606", - "Content-Type": "application/json; charset=utf-8", - 
"Expires": "-1", - "ETag": "\"9c009492-0000-1000-0000-69a89c8d0000\"", - "x-ms-ratelimit-remaining-subscription-writes": "799", - "x-ms-providerhub-traffic": "True", - "x-ms-request-id": "cf2c1ada-fb0d-40fb-b828-62e1f202bc62", - "x-ms-correlation-request-id": "45f38a46-23f0-4c4b-bd4b-3bf715f1ce71", - "x-ms-routing-request-id": "EASTUS2EUAP:20260304T205645Z:45f38a46-23f0-4c4b-bd4b-3bf715f1ce71", - "Strict-Transport-Security": "max-age=31536000; includeSubDomains", - "X-Content-Type-Options": "nosniff", - "X-Cache": "CONFIG_NOCACHE", - "X-MSEdge-Ref": "Ref A: A7C13ADCCB2641748C4748520B0E5448 Ref B: SN4AA2022305021 Ref C: 2026-03-04T20:56:43Z", - "Date": "Wed, 04 Mar 2026 20:56:45 GMT" - }, - "ResponseBody": { - "id": "Sanitized", - "name": "Sanitized", - "type": "microsoft.discovery/workspaces", - "location": "uksouth", - "tags": {}, - "systemData": { - "createdBy": "Sanitized", - "createdByType": "User", - "createdAt": "2026-03-04T19:36:15.7991238Z", - "lastModifiedBy": "Sanitized", - "lastModifiedByType": "User", - "lastModifiedAt": "2026-03-04T20:56:44.2310241Z" - }, - "properties": { - "managedResourceGroup": "mrg-dwsp-test-wrksp-create01-w3qqoq", - "supercomputerIds": [], - "workspaceApiUri": "https://test-wrksp-create01.workspace-dev.discovery.azure.com", - "workspaceUiUri": "https://studio.discovery.microsoft.com/workspaces/test-wrksp-create01", - "workspaceIdentity": { - "id": "Sanitized", - "principalId": "43959abc-ba88-4e2f-9f69-a7d3cc984c75", - "clientId": "3fe9d307-c0dc-410b-8dfe-116741e50bfb" - }, - "customerManagedKeys": "Enabled", - "keyVaultProperties": { - "keyVaultUri": "https://newapik.vault.azure.net/", - "keyName": "discoverykey", - "keyVersion": "956de2fc802f49eba81ddcc348ebc27c" - }, - "logAnalyticsClusterId": "/subscriptions/31b0b6a5-2647-47eb-8a38-7d12047ee8ec/resourceGroups/olawal/providers/Microsoft.OperationalInsights/clusters/mycluse", - "managedOnBehalfOfConfiguration": { - "moboBrokerResources": [ - { - "id": "Sanitized" - } - ] - }, - 
"provisioningState": "Succeeded" - } - } - } - ], - "Variables": {} -} From 407444cb0b97ae8e7fe76eb91e583fc6a5cb49e3 Mon Sep 17 00:00:00 2001 From: Oluwaseyi Lawal Date: Tue, 10 Mar 2026 16:50:11 -0500 Subject: [PATCH 9/9] . --- .github/CODEOWNERS | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index acdfb06169fe..1a2614a57246 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -343,6 +343,9 @@ # PRLabel: %Monitor /sdk/monitor/azure-monitor-ingestion/ @Azure/azure-sdk-write-monitor-data-plane +# PRLabel: %Cognitive - Content Understanding +/sdk/contentunderstanding/ @bojunehsu @changjian-wang @chienyuanchang @yungshinlintw + # PRLabel: %Monitor /sdk/monitor/azure-monitor-query/ @Azure/azure-sdk-write-monitor-query-logs