From 6ed1c9ca192913ea42516106028d5c8e2e0074b9 Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Tue, 17 Mar 2026 05:35:15 +0000 Subject: [PATCH] Configurations: 'specification/storagemover/StorageMover.Management/tspconfig.yaml', API Version: 2025-12-01, SDK Release Type: stable, and CommitSHA: 'b8861b82d3b47a8cf9c2dfae4a29ec9ff41eacbf' in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs' Pipeline run: https://dev.azure.com/azure-sdk/internal/_build/results?buildId=6019525 Refer to https://eng.ms/docs/products/azure-developer-experience/develop/sdk-release/sdk-release-prerequisites to prepare for SDK release. --- .../azure-mgmt-storagemover/CHANGELOG.md | 38 + .../azure-mgmt-storagemover/MANIFEST.in | 8 +- .../azure-mgmt-storagemover/_metadata.json | 9 +- .../apiview-properties.json | 21 + .../azure-mgmt-storagemover/azure/__init__.py | 2 +- .../azure/mgmt/__init__.py | 2 +- .../azure/mgmt/storagemover/_client.py | 9 +- .../azure/mgmt/storagemover/_configuration.py | 7 +- .../mgmt/storagemover/_utils/model_base.py | 141 ++- .../mgmt/storagemover/_utils/serialization.py | 19 +- .../azure/mgmt/storagemover/_validation.py | 66 + .../azure/mgmt/storagemover/_version.py | 2 +- .../azure/mgmt/storagemover/aio/_client.py | 9 +- .../mgmt/storagemover/aio/_configuration.py | 7 +- .../storagemover/aio/operations/__init__.py | 2 + .../aio/operations/_operations.py | 831 ++++++++++++- .../mgmt/storagemover/models/__init__.py | 26 + .../azure/mgmt/storagemover/models/_enums.py | 121 ++ .../azure/mgmt/storagemover/models/_models.py | 588 ++++++++- .../mgmt/storagemover/operations/__init__.py | 2 + .../storagemover/operations/_operations.py | 1101 ++++++++++++++--- .../agents_create_or_update_maximum_set.py | 2 +- .../agents_create_or_update_minimum_set.py | 2 +- ..._update_upload_limit_schedule_overnight.py | 2 +- .../generated_samples/agents_delete.py | 2 +- .../agents_get_maximum_set.py | 2 +- .../agents_get_minimum_set.py | 2 +- .../agents_list_maximum_set.py | 2 +- 
.../agents_list_minimum_set.py | 2 +- .../generated_samples/agents_update.py | 2 +- .../connections_create_or_update.py | 50 + .../generated_samples/connections_delete.py | 42 + .../generated_samples/connections_get.py | 43 + .../generated_samples/connections_list.py | 43 + ...e_or_update_azure_multi_cloud_connector.py | 3 +- ..._or_update_azure_storage_blob_container.py | 3 +- ..._or_update_azure_storage_nfs_file_share.py | 3 +- ..._or_update_azure_storage_smb_file_share.py | 3 +- .../endpoints_create_or_update_nfs_mount.py | 3 +- ...endpoints_create_or_update_s3_with_hmac.py | 57 + .../endpoints_create_or_update_smb_mount.py | 3 +- .../generated_samples/endpoints_delete.py | 2 +- ...dpoints_get_azure_multi_cloud_connector.py | 2 +- ...points_get_azure_storage_blob_container.py | 2 +- ...points_get_azure_storage_nfs_file_share.py | 2 +- ...points_get_azure_storage_smb_file_share.py | 2 +- .../endpoints_get_nfs_mount.py | 2 +- .../endpoints_get_s3_with_hmac.py | 43 + .../endpoints_get_smb_mount.py | 2 +- .../generated_samples/endpoints_list.py | 2 +- ...ints_update_azure_multi_cloud_connector.py | 2 +- ...nts_update_azure_storage_blob_container.py | 2 +- ...nts_update_azure_storage_nfs_file_share.py | 2 +- ...nts_update_azure_storage_smb_file_share.py | 2 +- .../endpoints_update_nfs_mount.py | 2 +- .../endpoints_update_s3_with_hmac.py | 44 + .../endpoints_update_smb_mount.py | 2 +- .../job_definitions_create_or_update.py | 6 +- ...nitions_create_or_update_cloud_to_cloud.py | 6 +- ...initions_create_or_update_with_schedule.py | 68 + .../job_definitions_delete.py | 2 +- .../generated_samples/job_definitions_get.py | 2 +- .../job_definitions_get_with_schedule.py | 44 + .../generated_samples/job_definitions_list.py | 2 +- .../job_definitions_start_job.py | 2 +- .../job_definitions_stop_job.py | 2 +- .../job_definitions_update.py | 11 +- .../generated_samples/job_runs_get.py | 2 +- .../job_runs_get_with_schedule.py | 45 + .../generated_samples/job_runs_list.py | 2 +- 
.../generated_samples/operations_list.py | 2 +- .../projects_create_or_update.py | 2 +- .../generated_samples/projects_delete.py | 2 +- .../generated_samples/projects_get.py | 2 +- .../generated_samples/projects_list.py | 2 +- .../generated_samples/projects_update.py | 2 +- .../storage_movers_create_or_update.py | 2 +- .../storage_movers_delete.py | 2 +- .../generated_samples/storage_movers_get.py | 2 +- .../generated_samples/storage_movers_list.py | 2 +- .../storage_movers_list_by_subscription.py | 2 +- .../storage_movers_update.py | 2 +- ...orage_mover_mgmt_connections_operations.py | 88 ++ ...mover_mgmt_connections_operations_async.py | 91 ++ ...e_mover_mgmt_job_definitions_operations.py | 23 +- ...r_mgmt_job_definitions_operations_async.py | 23 +- .../azure-mgmt-storagemover/pyproject.toml | 70 ++ .../azure-mgmt-storagemover/setup.py | 83 -- .../azure-mgmt-storagemover/tsp-location.yaml | 2 +- 89 files changed, 3559 insertions(+), 436 deletions(-) create mode 100644 sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_validation.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_create_or_update.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_delete.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_get.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_list.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_s3_with_hmac.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_s3_with_hmac.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_s3_with_hmac.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update_with_schedule.py create mode 100644 
sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_get_with_schedule.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_get_with_schedule.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_connections_operations.py create mode 100644 sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_connections_operations_async.py delete mode 100644 sdk/storagemover/azure-mgmt-storagemover/setup.py diff --git a/sdk/storagemover/azure-mgmt-storagemover/CHANGELOG.md b/sdk/storagemover/azure-mgmt-storagemover/CHANGELOG.md index 2b4bf6c7b4ee..39e90718a9f0 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/CHANGELOG.md +++ b/sdk/storagemover/azure-mgmt-storagemover/CHANGELOG.md @@ -1,5 +1,43 @@ # Release History +## 3.1.0 (2026-03-17) + +### Features Added + + - Client `StorageMoverMgmtClient` added operation group `connections` + - Model `AzureMultiCloudConnectorEndpointProperties` added property `endpoint_kind` + - Model `AzureStorageBlobContainerEndpointProperties` added property `endpoint_kind` + - Model `AzureStorageNfsFileShareEndpointProperties` added property `endpoint_kind` + - Model `AzureStorageSmbFileShareEndpointProperties` added property `endpoint_kind` + - Enum `CredentialType` added member `AZURE_KEY_VAULT_S3_WITH_HMAC` + - Model `EndpointBaseProperties` added property `endpoint_kind` + - Enum `EndpointType` added member `S3_WITH_HMAC` + - Model `JobDefinitionProperties` added property `connections` + - Model `JobDefinitionProperties` added property `schedule` + - Model `JobDefinitionProperties` added property `data_integrity_validation` + - Model `JobDefinitionProperties` added property `preserve_permissions` + - Model `JobDefinitionUpdateProperties` added property `connections` + - Model `JobDefinitionUpdateProperties` added property `data_integrity_validation` + - Model `JobRunProperties` added property `trigger_type` + - 
Model `JobRunProperties` added property `scheduled_execution_time` + - Model `JobRunProperties` added property `warnings` + - Model `NfsMountEndpointProperties` added property `endpoint_kind` + - Model `SmbMountEndpointProperties` added property `endpoint_kind` + - Added model `AzureKeyVaultS3WithHmacCredentials` + - Added model `Connection` + - Added model `ConnectionProperties` + - Added enum `ConnectionStatus` + - Added enum `DataIntegrityValidation` + - Added enum `EndpointKind` + - Added enum `Frequency` + - Added model `JobRunWarning` + - Added model `S3WithHmacEndpointProperties` + - Added model `S3WithHmacEndpointUpdateProperties` + - Added enum `S3WithHmacSourceType` + - Added model `ScheduleInfo` + - Added enum `TriggerType` + - Added model `ConnectionsOperations` + ## 3.0.0 (2025-09-04) ### Features Added diff --git a/sdk/storagemover/azure-mgmt-storagemover/MANIFEST.in b/sdk/storagemover/azure-mgmt-storagemover/MANIFEST.in index 38ef6860fb22..bc70e875c993 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/MANIFEST.in +++ b/sdk/storagemover/azure-mgmt-storagemover/MANIFEST.in @@ -1,7 +1,7 @@ -recursive-include tests *.py *.json -recursive-include samples *.py *.md include *.md -include azure/__init__.py -include azure/mgmt/__init__.py include LICENSE include azure/mgmt/storagemover/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/mgmt/__init__.py diff --git a/sdk/storagemover/azure-mgmt-storagemover/_metadata.json b/sdk/storagemover/azure-mgmt-storagemover/_metadata.json index a89fe4014f59..50803ea67770 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/_metadata.json +++ b/sdk/storagemover/azure-mgmt-storagemover/_metadata.json @@ -1,7 +1,10 @@ { - "apiVersion": "2025-07-01", - "commit": "af759847e0abab741437d695782ad62d7b2cce14", + "apiVersion": "2025-12-01", + "apiVersions": { + "Microsoft.StorageMover": "2025-12-01" + }, + "commit": "b8861b82d3b47a8cf9c2dfae4a29ec9ff41eacbf", 
"repository_url": "https://github.com/Azure/azure-rest-api-specs", "typespec_src": "specification/storagemover/StorageMover.Management", - "emitterVersion": "0.49.0" + "emitterVersion": "0.61.0" } \ No newline at end of file diff --git a/sdk/storagemover/azure-mgmt-storagemover/apiview-properties.json b/sdk/storagemover/azure-mgmt-storagemover/apiview-properties.json index 54cad1e1d6de..fd3e49d68b94 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/apiview-properties.json +++ b/sdk/storagemover/azure-mgmt-storagemover/apiview-properties.json @@ -9,6 +9,7 @@ "azure.mgmt.storagemover.models.AgentUpdateParameters": "Microsoft.StorageMover.AgentUpdateParameters", "azure.mgmt.storagemover.models.AgentUpdateProperties": "Microsoft.StorageMover.AgentUpdateProperties", "azure.mgmt.storagemover.models.Credentials": "Microsoft.StorageMover.Credentials", + "azure.mgmt.storagemover.models.AzureKeyVaultS3WithHmacCredentials": "Microsoft.StorageMover.AzureKeyVaultS3WithHmacCredentials", "azure.mgmt.storagemover.models.AzureKeyVaultSmbCredentials": "Microsoft.StorageMover.AzureKeyVaultSmbCredentials", "azure.mgmt.storagemover.models.EndpointBaseProperties": "Microsoft.StorageMover.EndpointBaseProperties", "azure.mgmt.storagemover.models.AzureMultiCloudConnectorEndpointProperties": "Microsoft.StorageMover.AzureMultiCloudConnectorEndpointProperties", @@ -20,6 +21,8 @@ "azure.mgmt.storagemover.models.AzureStorageNfsFileShareEndpointUpdateProperties": "Microsoft.StorageMover.AzureStorageNfsFileShareEndpointUpdateProperties", "azure.mgmt.storagemover.models.AzureStorageSmbFileShareEndpointProperties": "Microsoft.StorageMover.AzureStorageSmbFileShareEndpointProperties", "azure.mgmt.storagemover.models.AzureStorageSmbFileShareEndpointUpdateProperties": "Microsoft.StorageMover.AzureStorageSmbFileShareEndpointUpdateProperties", + "azure.mgmt.storagemover.models.Connection": "Microsoft.StorageMover.Connection", + "azure.mgmt.storagemover.models.ConnectionProperties": 
"Microsoft.StorageMover.ConnectionProperties", "azure.mgmt.storagemover.models.Endpoint": "Microsoft.StorageMover.Endpoint", "azure.mgmt.storagemover.models.EndpointBaseUpdateParameters": "Microsoft.StorageMover.EndpointBaseUpdateParameters", "azure.mgmt.storagemover.models.ErrorAdditionalInfo": "Azure.ResourceManager.CommonTypes.ErrorAdditionalInfo", @@ -34,6 +37,7 @@ "azure.mgmt.storagemover.models.JobRunError": "Microsoft.StorageMover.JobRunError", "azure.mgmt.storagemover.models.JobRunProperties": "Microsoft.StorageMover.JobRunProperties", "azure.mgmt.storagemover.models.JobRunResourceId": "Microsoft.StorageMover.JobRunResourceId", + "azure.mgmt.storagemover.models.JobRunWarning": "Microsoft.StorageMover.JobRunWarning", "azure.mgmt.storagemover.models.ManagedServiceIdentity": "Azure.ResourceManager.CommonTypes.ManagedServiceIdentity", "azure.mgmt.storagemover.models.NfsMountEndpointProperties": "Microsoft.StorageMover.NfsMountEndpointProperties", "azure.mgmt.storagemover.models.NfsMountEndpointUpdateProperties": "Microsoft.StorageMover.NfsMountEndpointUpdateProperties", @@ -44,6 +48,9 @@ "azure.mgmt.storagemover.models.ProjectUpdateParameters": "Microsoft.StorageMover.ProjectUpdateParameters", "azure.mgmt.storagemover.models.ProjectUpdateProperties": "Microsoft.StorageMover.ProjectUpdateProperties", "azure.mgmt.storagemover.models.Recurrence": "Microsoft.StorageMover.Recurrence", + "azure.mgmt.storagemover.models.S3WithHmacEndpointProperties": "Microsoft.StorageMover.S3WithHmacEndpointProperties", + "azure.mgmt.storagemover.models.S3WithHmacEndpointUpdateProperties": "Microsoft.StorageMover.S3WithHmacEndpointUpdateProperties", + "azure.mgmt.storagemover.models.ScheduleInfo": "Microsoft.StorageMover.ScheduleInfo", "azure.mgmt.storagemover.models.SmbMountEndpointProperties": "Microsoft.StorageMover.SmbMountEndpointProperties", "azure.mgmt.storagemover.models.SmbMountEndpointUpdateProperties": "Microsoft.StorageMover.SmbMountEndpointUpdateProperties", 
"azure.mgmt.storagemover.models.SourceEndpoint": "Microsoft.StorageMover.SourceEndpoint", @@ -70,13 +77,19 @@ "azure.mgmt.storagemover.models.Minute": "Microsoft.StorageMover.Minute", "azure.mgmt.storagemover.models.DayOfWeek": "Microsoft.StorageMover.DayOfWeek", "azure.mgmt.storagemover.models.EndpointType": "Microsoft.StorageMover.EndpointType", + "azure.mgmt.storagemover.models.EndpointKind": "Microsoft.StorageMover.EndpointKind", "azure.mgmt.storagemover.models.NfsVersion": "Microsoft.StorageMover.NfsVersion", "azure.mgmt.storagemover.models.CredentialType": "Microsoft.StorageMover.CredentialType", + "azure.mgmt.storagemover.models.S3WithHmacSourceType": "Microsoft.StorageMover.S3WithHmacSourceType", "azure.mgmt.storagemover.models.ManagedServiceIdentityType": "Azure.ResourceManager.CommonTypes.ManagedServiceIdentityType", "azure.mgmt.storagemover.models.JobType": "Microsoft.StorageMover.JobType", "azure.mgmt.storagemover.models.CopyMode": "Microsoft.StorageMover.CopyMode", "azure.mgmt.storagemover.models.JobRunStatus": "Microsoft.StorageMover.JobRunStatus", + "azure.mgmt.storagemover.models.Frequency": "Microsoft.StorageMover.Frequency", + "azure.mgmt.storagemover.models.DataIntegrityValidation": "Microsoft.StorageMover.DataIntegrityValidation", + "azure.mgmt.storagemover.models.ConnectionStatus": "Microsoft.StorageMover.ConnectionStatus", "azure.mgmt.storagemover.models.JobRunScanStatus": "Microsoft.StorageMover.JobRunScanStatus", + "azure.mgmt.storagemover.models.TriggerType": "Microsoft.StorageMover.TriggerType", "azure.mgmt.storagemover.operations.Operations.list": "Azure.ResourceManager.Operations.list", "azure.mgmt.storagemover.aio.operations.Operations.list": "Azure.ResourceManager.Operations.list", "azure.mgmt.storagemover.operations.StorageMoversOperations.get": "Microsoft.StorageMover.StorageMovers.get", @@ -135,6 +148,14 @@ "azure.mgmt.storagemover.aio.operations.JobDefinitionsOperations.start_job": "Microsoft.StorageMover.JobDefinitions.startJob", 
"azure.mgmt.storagemover.operations.JobDefinitionsOperations.stop_job": "Microsoft.StorageMover.JobDefinitions.stopJob", "azure.mgmt.storagemover.aio.operations.JobDefinitionsOperations.stop_job": "Microsoft.StorageMover.JobDefinitions.stopJob", + "azure.mgmt.storagemover.operations.ConnectionsOperations.create_or_update": "Microsoft.StorageMover.Connections.createOrUpdate", + "azure.mgmt.storagemover.aio.operations.ConnectionsOperations.create_or_update": "Microsoft.StorageMover.Connections.createOrUpdate", + "azure.mgmt.storagemover.operations.ConnectionsOperations.get": "Microsoft.StorageMover.Connections.get", + "azure.mgmt.storagemover.aio.operations.ConnectionsOperations.get": "Microsoft.StorageMover.Connections.get", + "azure.mgmt.storagemover.operations.ConnectionsOperations.list": "Microsoft.StorageMover.Connections.list", + "azure.mgmt.storagemover.aio.operations.ConnectionsOperations.list": "Microsoft.StorageMover.Connections.list", + "azure.mgmt.storagemover.operations.ConnectionsOperations.begin_delete": "Microsoft.StorageMover.Connections.delete", + "azure.mgmt.storagemover.aio.operations.ConnectionsOperations.begin_delete": "Microsoft.StorageMover.Connections.delete", "azure.mgmt.storagemover.operations.JobRunsOperations.get": "Microsoft.StorageMover.JobRuns.get", "azure.mgmt.storagemover.aio.operations.JobRunsOperations.get": "Microsoft.StorageMover.JobRuns.get", "azure.mgmt.storagemover.operations.JobRunsOperations.list": "Microsoft.StorageMover.JobRuns.list", diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/__init__.py b/sdk/storagemover/azure-mgmt-storagemover/azure/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/__init__.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git 
a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/__init__.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/__init__.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_client.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_client.py index ce173449cdc5..8b8fb516d225 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_client.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_client.py @@ -21,6 +21,7 @@ from ._utils.serialization import Deserializer, Serializer from .operations import ( AgentsOperations, + ConnectionsOperations, EndpointsOperations, JobDefinitionsOperations, JobRunsOperations, @@ -49,6 +50,8 @@ class StorageMoverMgmtClient: # pylint: disable=too-many-instance-attributes :vartype projects: azure.mgmt.storagemover.operations.ProjectsOperations :ivar job_definitions: JobDefinitionsOperations operations :vartype job_definitions: azure.mgmt.storagemover.operations.JobDefinitionsOperations + :ivar connections: ConnectionsOperations operations + :vartype connections: azure.mgmt.storagemover.operations.ConnectionsOperations :ivar job_runs: JobRunsOperations operations :vartype job_runs: azure.mgmt.storagemover.operations.JobRunsOperations :param credential: Credential used to authenticate requests to the service. Required. @@ -60,8 +63,9 @@ class StorageMoverMgmtClient: # pylint: disable=too-many-instance-attributes :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is None. 
:paramtype cloud_setting: ~azure.core.AzureClouds - :keyword api_version: The API version to use for this operation. Default value is "2025-07-01". - Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-12-01" + and None. Default value is "2025-12-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. @@ -120,6 +124,7 @@ def __init__( self.endpoints = EndpointsOperations(self._client, self._config, self._serialize, self._deserialize) self.projects = ProjectsOperations(self._client, self._config, self._serialize, self._deserialize) self.job_definitions = JobDefinitionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.connections = ConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) self.job_runs = JobRunsOperations(self._client, self._config, self._serialize, self._deserialize) def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_configuration.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_configuration.py index bb035d444c97..f45662bd5938 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_configuration.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_configuration.py @@ -33,8 +33,9 @@ class StorageMoverMgmtClientConfiguration: # pylint: disable=too-many-instance- :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is None. :type cloud_setting: ~azure.core.AzureClouds - :keyword api_version: The API version to use for this operation. Default value is "2025-07-01". 
- Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-12-01" + and None. Default value is "2025-12-01". Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str """ @@ -46,7 +47,7 @@ def __init__( cloud_setting: Optional["AzureClouds"] = None, **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2025-07-01") + api_version: str = kwargs.pop("api_version", "2025-12-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_utils/model_base.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_utils/model_base.py index 12926fa98dcf..7b7f8ba67b53 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_utils/model_base.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_utils/model_base.py @@ -37,6 +37,7 @@ TZ_UTC = timezone.utc _T = typing.TypeVar("_T") +_NONE_TYPE = type(None) def _timedelta_as_isostr(td: timedelta) -> str: @@ -171,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" ) +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + "spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: """Deserialize ISO-8601 formatted string into Datetime object. 
@@ -202,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") - return date_obj + return date_obj # type: ignore[no-any-return] def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: @@ -256,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time: """ if isinstance(attr, time): return attr - return isodate.parse_time(attr) + return isodate.parse_time(attr) # type: ignore[no-any-return] def _deserialize_bytes(attr): @@ -315,6 +331,8 @@ def _deserialize_int_as_str(attr): def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): if annotation is int and rf and rf._format == "str": return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) if rf and rf._format: return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore @@ -353,9 +371,39 @@ def __contains__(self, key: typing.Any) -> bool: return key in self._data def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, "_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) 
and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized return self._data.__getitem__(key) def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass self._data.__setitem__(key, value) def __delitem__(self, key: str) -> None: @@ -467,6 +515,8 @@ def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: return self._data.setdefault(key, default) def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data try: other_model = self.__class__(other) except Exception: @@ -483,6 +533,8 @@ def _is_model(obj: typing.Any) -> bool: def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) return [_serialize(x, format) for x in o] if isinstance(o, dict): return {k: _serialize(v, format) for k, v in o.items()} @@ -578,6 +630,9 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: if len(items) > 0: existed_attr_keys.append(xml_name) dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + elif not rf._is_optional: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = [] continue # text element is primitive type @@ -758,6 +813,14 @@ def _deserialize_multiple_sequence( return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + 
isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + def _deserialize_sequence( deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], @@ -767,6 +830,19 @@ def _deserialize_sequence( return obj if isinstance(obj, ET.Element): obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) @@ -817,16 +893,18 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur # is it optional? 
try: - if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if rf: + rf._is_optional = True if len(annotation.__args__) <= 2: # pyright: ignore if_obj_deserializer = _get_deserialize_callable_from_annotation( - next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore ) return functools.partial(_deserialize_with_optional, if_obj_deserializer) # the type is Optional[Union[...]], we need to remove the None type from the Union annotation_copy = copy.copy(annotation) - annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) except AttributeError: pass @@ -910,16 +988,20 @@ def _deserialize_with_callable( return float(value.text) if value.text else None if deserializer is bool: return value.text == "true" if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING.values(): + return deserializer(value.text) if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values(): + return deserializer(value.text) if value.text else None if deserializer is None: return value if deserializer in [int, float, bool]: return deserializer(value) if isinstance(deserializer, CaseInsensitiveEnumMeta): try: - return deserializer(value) + return deserializer(value.text if isinstance(value, ET.Element) else value) except ValueError: # for unknown value, return raw value - return value + return value.text if isinstance(value, ET.Element) else value if isinstance(deserializer, type) and issubclass(deserializer, Model): return deserializer._deserialize(value, []) return 
typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) @@ -952,7 +1034,7 @@ def _failsafe_deserialize( ) -> typing.Any: try: return _deserialize(deserializer, response.json(), module, rf, format) - except DeserializationError: + except Exception: # pylint: disable=broad-except _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) @@ -965,13 +1047,14 @@ def _failsafe_deserialize_xml( ) -> typing.Any: try: return _deserialize_xml(deserializer, response.text()) - except DeserializationError: + except Exception: # pylint: disable=broad-except _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) return None +# pylint: disable=too-many-instance-attributes class _RestField: def __init__( self, @@ -991,6 +1074,7 @@ def __init__( self._is_discriminator = is_discriminator self._visibility = visibility self._is_model = False + self._is_optional = False self._default = default self._format = format self._is_multipart_file_input = is_multipart_file_input @@ -998,7 +1082,11 @@ def __init__( @property def _class_type(self) -> typing.Any: - return getattr(self._type, "args", [None])[0] + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result @property def _rest_name(self) -> str: @@ -1009,14 +1097,37 @@ def _rest_name(self) -> str: def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin # by this point, type and rest_name will have a value bc we default # them in __new__ of the Model class - item = obj.get(self._rest_name) + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) if item is None: return item if self._is_model: return item - return _deserialize(self._type, 
_serialize(item, self._format), rf=self) + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + if value is None: # we want to wipe out entries if users set attr to None try: @@ -1184,7 +1295,7 @@ def _get_wrapped_element( _get_element(v, exclude_readonly, meta, wrapped_element) else: wrapped_element.text = _get_primitive_type_value(v) - return wrapped_element + return wrapped_element # type: ignore[no-any-return] def _get_primitive_type_value(v) -> str: @@ -1197,7 +1308,9 @@ def _get_primitive_type_value(v) -> str: return str(v) -def _create_xml_element(tag, prefix=None, ns=None): +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: if prefix and ns: ET.register_namespace(prefix, ns) if ns: diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_utils/serialization.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_utils/serialization.py index e81921cbb011..81ec1de5922b 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_utils/serialization.py 
+++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_utils/serialization.py @@ -787,7 +787,7 @@ def serialize_data(self, data, data_type, **kwargs): # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) @@ -821,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1757,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises TypeError: if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. 
# If it's still an XML node, take the text @@ -1783,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_validation.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_validation.py new file mode 100644 index 000000000000..f5af3a4eb8a2 --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_validation.py @@ -0,0 +1,66 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import functools + + +def api_version_validation(**kwargs): + params_added_on = kwargs.pop("params_added_on", {}) + method_added_on = kwargs.pop("method_added_on", "") + api_versions_list = kwargs.pop("api_versions_list", []) + + def _index_with_default(value: str, default: int = -1) -> int: + """Get the index of value in api_versions_list, or return default if not found. + + :param value: The value to search for in the api_versions_list. + :type value: str + :param default: The default value to return if the value is not found. + :type default: int + :return: The index of the value in the list, or the default value if not found.
+ :rtype: int + """ + try: + return api_versions_list.index(value) + except ValueError: + return default + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + # this assumes the client exposes its API version as _config.api_version + client = args[0] + client_api_version = client._config.api_version # pylint: disable=protected-access + except AttributeError: + return func(*args, **kwargs) + + if _index_with_default(method_added_on) > _index_with_default(client_api_version): + raise ValueError( + f"'{func.__name__}' is not available in API version " + f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." + ) + + unsupported = { + parameter: api_version + for api_version, parameters in params_added_on.items() + for parameter in parameters + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) + } + if unsupported: + raise ValueError( + "".join( + [ + f"'{param}' is not available in API version {client_api_version}. " + f"Use service API version {version} or newer.\n" + for param, version in unsupported.items() + ] + ) + ) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_version.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_version.py index 7b1116c95ad7..246b0e84ae3d 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_version.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated.
# -------------------------------------------------------------------------- -VERSION = "3.0.0" +VERSION = "3.1.0" diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/_client.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/_client.py index ea8b9abd4da6..8bacf962d5fe 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/_client.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/_client.py @@ -21,6 +21,7 @@ from ._configuration import StorageMoverMgmtClientConfiguration from .operations import ( AgentsOperations, + ConnectionsOperations, EndpointsOperations, JobDefinitionsOperations, JobRunsOperations, @@ -49,6 +50,8 @@ class StorageMoverMgmtClient: # pylint: disable=too-many-instance-attributes :vartype projects: azure.mgmt.storagemover.aio.operations.ProjectsOperations :ivar job_definitions: JobDefinitionsOperations operations :vartype job_definitions: azure.mgmt.storagemover.aio.operations.JobDefinitionsOperations + :ivar connections: ConnectionsOperations operations + :vartype connections: azure.mgmt.storagemover.aio.operations.ConnectionsOperations :ivar job_runs: JobRunsOperations operations :vartype job_runs: azure.mgmt.storagemover.aio.operations.JobRunsOperations :param credential: Credential used to authenticate requests to the service. Required. @@ -60,8 +63,9 @@ class StorageMoverMgmtClient: # pylint: disable=too-many-instance-attributes :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is None. :paramtype cloud_setting: ~azure.core.AzureClouds - :keyword api_version: The API version to use for this operation. Default value is "2025-07-01". - Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-12-01" + and None. Default value is "2025-12-01". 
Note that overriding this default value may result in + unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. @@ -122,6 +126,7 @@ def __init__( self.endpoints = EndpointsOperations(self._client, self._config, self._serialize, self._deserialize) self.projects = ProjectsOperations(self._client, self._config, self._serialize, self._deserialize) self.job_definitions = JobDefinitionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.connections = ConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) self.job_runs = JobRunsOperations(self._client, self._config, self._serialize, self._deserialize) def send_request( diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/_configuration.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/_configuration.py index 4b2b928682c1..a3ec20aeb481 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/_configuration.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/_configuration.py @@ -33,8 +33,9 @@ class StorageMoverMgmtClientConfiguration: # pylint: disable=too-many-instance- :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is None. :type cloud_setting: ~azure.core.AzureClouds - :keyword api_version: The API version to use for this operation. Default value is "2025-07-01". - Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Known values are "2025-12-01" + and None. Default value is "2025-12-01". Note that overriding this default value may result in + unsupported behavior. 
:paramtype api_version: str """ @@ -46,7 +47,7 @@ def __init__( cloud_setting: Optional["AzureClouds"] = None, **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2025-07-01") + api_version: str = kwargs.pop("api_version", "2025-12-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/operations/__init__.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/operations/__init__.py index 5f500539f0f7..53e1d7223645 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/operations/__init__.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/operations/__init__.py @@ -18,6 +18,7 @@ from ._operations import EndpointsOperations # type: ignore from ._operations import ProjectsOperations # type: ignore from ._operations import JobDefinitionsOperations # type: ignore +from ._operations import ConnectionsOperations # type: ignore from ._operations import JobRunsOperations # type: ignore from ._patch import __all__ as _patch_all @@ -31,6 +32,7 @@ "EndpointsOperations", "ProjectsOperations", "JobDefinitionsOperations", + "ConnectionsOperations", "JobRunsOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/operations/_operations.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/operations/_operations.py index b21b07478afc..90227af31716 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/operations/_operations.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/aio/operations/_operations.py @@ -36,12 +36,17 @@ from ... 
import models as _models from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from ..._utils.serialization import Deserializer, Serializer +from ..._validation import api_version_validation from ...operations._operations import ( build_agents_create_or_update_request, build_agents_delete_request, build_agents_get_request, build_agents_list_request, build_agents_update_request, + build_connections_create_or_update_request, + build_connections_delete_request, + build_connections_get_request, + build_connections_list_request, build_endpoints_create_or_update_request, build_endpoints_delete_request, build_endpoints_get_request, @@ -71,10 +76,10 @@ ) from .._configuration import StorageMoverMgmtClientConfiguration -List = list T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] JSON = MutableMapping[str, Any] +List = list class Operations: @@ -154,7 +159,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Operation], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -170,7 +178,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -234,6 +245,7 @@ async def get(self, resource_group_name: str, storage_mover_name: str, **kwargs: } _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -248,11 +260,14 @@ async def get(self, resource_group_name: str, storage_mover_name: str, **kwargs: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.StorageMover, response.json()) @@ -399,6 +414,7 @@ async def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -413,11 +429,14 @@ async def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.StorageMover, response.json()) @@ -570,6 +589,7 @@ async def update( } _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -584,11 +604,14 @@ async def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.StorageMover, response.json()) @@ -626,6 +649,7 @@ async def _delete_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -639,7 +663,10 @@ async def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -647,7 +674,7 @@ async def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return 
cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -778,7 +805,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.StorageMover], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.StorageMover], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -794,7 +824,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -862,7 +895,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.StorageMover], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.StorageMover], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -878,7 +914,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -947,6 +986,7 @@ async def get( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = 
kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -961,11 +1001,14 @@ async def get( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Agent, response.json()) @@ -1129,6 +1172,7 @@ async def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1143,11 +1187,14 @@ async def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Agent, response.json()) @@ -1307,6 +1354,7 @@ async def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1321,11 +1369,14 @@ async def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Agent, response.json()) @@ -1364,6 +1415,7 @@ async def _delete_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1377,7 +1429,10 @@ async def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -1385,7 +1440,7 @@ async def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1522,7 +1577,10 @@ def prepare_request(next_link=None): async def 
extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Agent], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.Agent], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -1538,7 +1596,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -1607,6 +1668,7 @@ async def get( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1621,11 +1683,14 @@ async def get( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Endpoint, response.json()) @@ -1789,6 +1854,7 @@ async def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1803,11 +1869,14 @@ async def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Endpoint, response.json()) @@ -1973,6 +2042,7 @@ async def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1987,11 +2057,14 @@ async def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Endpoint, response.json()) @@ -2030,6 +2103,7 @@ async def _delete_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, 
stream=_stream, **kwargs @@ -2043,7 +2117,10 @@ async def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -2051,7 +2128,7 @@ async def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2190,7 +2267,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Endpoint], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.Endpoint], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -2206,7 +2286,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -2275,6 +2358,7 @@ async def get( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2289,11 +2373,14 @@ async def get( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Project, response.json()) @@ -2453,6 +2540,7 @@ async def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2467,11 +2555,14 @@ async def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Project, response.json()) @@ -2636,6 +2727,7 @@ async def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access _request, stream=_stream, **kwargs @@ -2650,11 +2742,14 @@ async def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Project, response.json()) @@ -2693,6 +2788,7 @@ async def _delete_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2706,7 +2802,10 @@ async def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -2714,7 +2813,7 @@ async def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2853,7 +2952,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = 
pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Project], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.Project], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -2869,7 +2971,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -2946,6 +3051,7 @@ async def get( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2960,11 +3066,14 @@ async def get( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobDefinition, response.json()) @@ -3141,6 +3250,7 @@ async def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3155,11 +3265,14 @@ async def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobDefinition, response.json()) @@ -3338,6 +3451,7 @@ async def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3352,11 +3466,14 @@ async def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobDefinition, response.json()) @@ -3401,6 +3518,7 @@ async def _delete_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, 
**kwargs @@ -3414,7 +3532,10 @@ async def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -3422,7 +3543,7 @@ async def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3572,7 +3693,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.JobDefinition], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.JobDefinition], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -3588,7 +3712,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -3648,6 +3775,7 @@ async def start_job( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3662,11 +3790,14 @@ async def start_job( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobRunResourceId, response.json()) @@ -3727,6 +3858,7 @@ async def stop_job( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3741,11 +3873,14 @@ async def stop_job( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobRunResourceId, response.json()) @@ -3755,6 +3890,566 @@ async def stop_job( return deserialized # type: ignore +class ConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.storagemover.aio.StorageMoverMgmtClient`'s + :attr:`connections` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageMoverMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @overload + async def create_or_update( + self, + resource_group_name: str, + storage_mover_name: str, + connection_name: str, + connection: _models.Connection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Connection: + """Creates or updates a Connection resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :param connection: Required. + :type connection: ~azure.mgmt.storagemover.models.Connection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Connection. 
The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + storage_mover_name: str, + connection_name: str, + connection: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Connection: + """Creates or updates a Connection resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :param connection: Required. + :type connection: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + storage_mover_name: str, + connection_name: str, + connection: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Connection: + """Creates or updates a Connection resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :param connection: Required. + :type connection: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "storage_mover_name", + "connection_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + async def create_or_update( + self, + resource_group_name: str, + storage_mover_name: str, + connection_name: str, + connection: Union[_models.Connection, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.Connection: + """Creates or updates a Connection resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :param connection: Is one of the following types: Connection, JSON, IO[bytes] Required. + :type connection: ~azure.mgmt.storagemover.models.Connection or JSON or IO[bytes] + :return: Connection. 
The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(connection, (IOBase, bytes)): + _content = connection + else: + _content = json.dumps(connection, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_connections_create_or_update_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Connection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "storage_mover_name", + "connection_name", + "accept", + ] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + async def get( + self, resource_group_name: str, storage_mover_name: str, connection_name: str, **kwargs: Any + ) -> _models.Connection: + """Gets a Connection resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :return: Connection. 
The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + + _request = build_connections_get_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Connection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) 
# type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "storage_mover_name", "accept"] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + def list( + self, resource_group_name: str, storage_mover_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.Connection"]: + """Lists all Connections in a Storage Mover. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :return: An iterator like instance of Connection + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagemover.models.Connection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_connections_list_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + 
_next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Connection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "storage_mover_name", + "connection_name", + ] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + async def _delete_initial( + self, resource_group_name: str, storage_mover_name: str, connection_name: str, **kwargs: Any 
+ ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_connections_delete_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, 
response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "storage_mover_name", + "connection_name", + ] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + async def begin_delete( + self, resource_group_name: str, storage_mover_name: str, connection_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes a Connection resource. Returns 409 if there are active jobs using this connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + connection_name=connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # 
type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + class JobRunsOperations: """ .. warning:: @@ -3828,6 +4523,7 @@ async def get( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3842,11 +4538,14 @@ async def get( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobRun, response.json()) @@ -3936,7 +4635,10 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.JobRun], 
deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.JobRun], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -3952,7 +4654,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/__init__.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/__init__.py index bdfb69f7ce69..17d3b5e0d601 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/__init__.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/__init__.py @@ -19,6 +19,7 @@ AgentPropertiesErrorDetails, AgentUpdateParameters, AgentUpdateProperties, + AzureKeyVaultS3WithHmacCredentials, AzureKeyVaultSmbCredentials, AzureMultiCloudConnectorEndpointProperties, AzureMultiCloudConnectorEndpointUpdateProperties, @@ -28,6 +29,8 @@ AzureStorageNfsFileShareEndpointUpdateProperties, AzureStorageSmbFileShareEndpointProperties, AzureStorageSmbFileShareEndpointUpdateProperties, + Connection, + ConnectionProperties, Credentials, Endpoint, EndpointBaseProperties, @@ -45,6 +48,7 @@ JobRunError, JobRunProperties, JobRunResourceId, + JobRunWarning, ManagedServiceIdentity, NfsMountEndpointProperties, NfsMountEndpointUpdateProperties, @@ -57,6 +61,9 @@ ProxyResource, Recurrence, Resource, + S3WithHmacEndpointProperties, + S3WithHmacEndpointUpdateProperties, + ScheduleInfo, SmbMountEndpointProperties, SmbMountEndpointUpdateProperties, SourceEndpoint, @@ -80,11 +87,15 @@ from 
._enums import ( # type: ignore ActionType, AgentStatus, + ConnectionStatus, CopyMode, CreatedByType, CredentialType, + DataIntegrityValidation, DayOfWeek, + EndpointKind, EndpointType, + Frequency, JobRunScanStatus, JobRunStatus, JobType, @@ -93,6 +104,8 @@ NfsVersion, Origin, ProvisioningState, + S3WithHmacSourceType, + TriggerType, ) from ._patch import __all__ as _patch_all from ._patch import * @@ -104,6 +117,7 @@ "AgentPropertiesErrorDetails", "AgentUpdateParameters", "AgentUpdateProperties", + "AzureKeyVaultS3WithHmacCredentials", "AzureKeyVaultSmbCredentials", "AzureMultiCloudConnectorEndpointProperties", "AzureMultiCloudConnectorEndpointUpdateProperties", @@ -113,6 +127,8 @@ "AzureStorageNfsFileShareEndpointUpdateProperties", "AzureStorageSmbFileShareEndpointProperties", "AzureStorageSmbFileShareEndpointUpdateProperties", + "Connection", + "ConnectionProperties", "Credentials", "Endpoint", "EndpointBaseProperties", @@ -130,6 +146,7 @@ "JobRunError", "JobRunProperties", "JobRunResourceId", + "JobRunWarning", "ManagedServiceIdentity", "NfsMountEndpointProperties", "NfsMountEndpointUpdateProperties", @@ -142,6 +159,9 @@ "ProxyResource", "Recurrence", "Resource", + "S3WithHmacEndpointProperties", + "S3WithHmacEndpointUpdateProperties", + "ScheduleInfo", "SmbMountEndpointProperties", "SmbMountEndpointUpdateProperties", "SourceEndpoint", @@ -162,11 +182,15 @@ "WeeklyRecurrence", "ActionType", "AgentStatus", + "ConnectionStatus", "CopyMode", "CreatedByType", "CredentialType", + "DataIntegrityValidation", "DayOfWeek", + "EndpointKind", "EndpointType", + "Frequency", "JobRunScanStatus", "JobRunStatus", "JobType", @@ -175,6 +199,8 @@ "NfsVersion", "Origin", "ProvisioningState", + "S3WithHmacSourceType", + "TriggerType", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/_enums.py 
b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/_enums.py index dc73d0b4ac99..76de630dd516 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/_enums.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/_enums.py @@ -23,18 +23,41 @@ class AgentStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The Agent status.""" REGISTERING = "Registering" + """REGISTERING.""" OFFLINE = "Offline" + """OFFLINE.""" ONLINE = "Online" + """ONLINE.""" EXECUTING = "Executing" + """EXECUTING.""" REQUIRES_ATTENTION = "RequiresAttention" + """REQUIRES_ATTENTION.""" UNREGISTERING = "Unregistering" + """UNREGISTERING.""" + + +class ConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The connection status.""" + + APPROVED = "Approved" + """APPROVED.""" + REJECTED = "Rejected" + """REJECTED.""" + DISCONNECTED = "Disconnected" + """DISCONNECTED.""" + PENDING = "Pending" + """PENDING.""" + STALE = "Stale" + """STALE.""" class CopyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Strategy to use for copy.""" ADDITIVE = "Additive" + """ADDITIVE.""" MIRROR = "Mirror" + """MIRROR.""" class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -54,58 +77,123 @@ class CredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The Credentials type.""" AZURE_KEY_VAULT_SMB = "AzureKeyVaultSmb" + """AZURE_KEY_VAULT_SMB.""" + AZURE_KEY_VAULT_S3_WITH_HMAC = "AzureKeyVaultS3WithHMAC" + """AZURE_KEY_VAULT_S3_WITH_HMAC.""" + + +class DataIntegrityValidation(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The Data integrity validation mode.""" + + SAVE_VERIFY_FILE_MD5 = "SaveVerifyFileMD5" + """SAVE_VERIFY_FILE_MD5.""" + SAVE_FILE_MD5 = "SaveFileMD5" + """SAVE_FILE_MD5.""" + NONE = "None" + """NONE.""" class DayOfWeek(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The day of week.""" MONDAY = "Monday" + """MONDAY.""" TUESDAY = "Tuesday" + """TUESDAY.""" WEDNESDAY = 
"Wednesday" + """WEDNESDAY.""" THURSDAY = "Thursday" + """THURSDAY.""" FRIDAY = "Friday" + """FRIDAY.""" SATURDAY = "Saturday" + """SATURDAY.""" SUNDAY = "Sunday" + """SUNDAY.""" + + +class EndpointKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the endpoint source/target.""" + + SOURCE = "Source" + """SOURCE.""" + TARGET = "Target" + """TARGET.""" class EndpointType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The Endpoint resource type.""" AZURE_STORAGE_BLOB_CONTAINER = "AzureStorageBlobContainer" + """AZURE_STORAGE_BLOB_CONTAINER.""" NFS_MOUNT = "NfsMount" + """NFS_MOUNT.""" AZURE_STORAGE_SMB_FILE_SHARE = "AzureStorageSmbFileShare" + """AZURE_STORAGE_SMB_FILE_SHARE.""" SMB_MOUNT = "SmbMount" + """SMB_MOUNT.""" AZURE_MULTI_CLOUD_CONNECTOR = "AzureMultiCloudConnector" + """AZURE_MULTI_CLOUD_CONNECTOR.""" AZURE_STORAGE_NFS_FILE_SHARE = "AzureStorageNfsFileShare" + """AZURE_STORAGE_NFS_FILE_SHARE.""" + S3_WITH_HMAC = "S3WithHMAC" + """S3_WITH_HMAC.""" + + +class Frequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of schedule — Monthly, Weekly, or Daily.""" + + MONTHLY = "Monthly" + """MONTHLY.""" + WEEKLY = "Weekly" + """WEEKLY.""" + DAILY = "Daily" + """DAILY.""" + ONETIME = "Onetime" + """ONETIME.""" class JobRunScanStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The status of Agent's scanning of source.""" NOT_STARTED = "NotStarted" + """NOT_STARTED.""" SCANNING = "Scanning" + """SCANNING.""" COMPLETED = "Completed" + """COMPLETED.""" class JobRunStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The current status of the Job Run in a non-terminal state, if exists.""" QUEUED = "Queued" + """QUEUED.""" STARTED = "Started" + """STARTED.""" RUNNING = "Running" + """RUNNING.""" CANCEL_REQUESTED = "CancelRequested" + """CANCEL_REQUESTED.""" CANCELING = "Canceling" + """CANCELING.""" CANCELED = "Canceled" + """CANCELED.""" FAILED = "Failed" + """FAILED.""" SUCCEEDED = "Succeeded" + """SUCCEEDED.""" 
PAUSED_BY_BANDWIDTH_MANAGEMENT = "PausedByBandwidthManagement" + """PAUSED_BY_BANDWIDTH_MANAGEMENT.""" class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of the Job.""" ON_PREM_TO_CLOUD = "OnPremToCloud" + """ON_PREM_TO_CLOUD.""" CLOUD_TO_CLOUD = "CloudToCloud" + """CLOUD_TO_CLOUD.""" class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -129,15 +217,20 @@ class Minute(int, Enum, metaclass=CaseInsensitiveEnumMeta): """ ZERO = 0 + """ZERO.""" THIRTY = 30 + """THIRTY.""" class NfsVersion(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The NFS protocol version.""" NF_SAUTO = "NFSauto" + """NF_SAUTO.""" NF_SV3 = "NFSv3" + """NF_SV3.""" NF_SV4 = "NFSv4" + """NF_SV4.""" class Origin(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -157,6 +250,34 @@ class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The provisioning state of a resource.""" SUCCEEDED = "Succeeded" + """SUCCEEDED.""" CANCELED = "Canceled" + """CANCELED.""" FAILED = "Failed" + """FAILED.""" DELETING = "Deleting" + """DELETING.""" + + +class S3WithHmacSourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The source type of S3WithHmac endpoint.""" + + MINIO = "MINIO" + """MINIO.""" + BACKBLAZE = "BACKBLAZE" + """BACKBLAZE.""" + IBM = "IBM" + """IBM.""" + CLOUDFLARE = "CLOUDFLARE" + """CLOUDFLARE.""" + GCS = "GCS" + """GCS.""" + + +class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of Job run trigger Manual or Scheduled.""" + + MANUAL = "Manual" + """MANUAL.""" + SCHEDULED = "Scheduled" + """SCHEDULED.""" diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/_models.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/_models.py index 510d0602c1cd..ace6346afea3 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/_models.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/models/_models.py @@ 
-365,15 +365,17 @@ class Credentials(_Model): """The Credentials. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureKeyVaultSmbCredentials + AzureKeyVaultS3WithHmacCredentials, AzureKeyVaultSmbCredentials - :ivar type: The Credentials type. Required. "AzureKeyVaultSmb" + :ivar type: The Credentials type. Required. Known values are: "AzureKeyVaultSmb" and + "AzureKeyVaultS3WithHMAC". :vartype type: str or ~azure.mgmt.storagemover.models.CredentialType """ __mapping__: dict[str, _Model] = {} type: str = rest_discriminator(name="type", visibility=["read", "create"]) - """The Credentials type. Required. \"AzureKeyVaultSmb\"""" + """The Credentials type. Required. Known values are: \"AzureKeyVaultSmb\" and + \"AzureKeyVaultS3WithHMAC\".""" @overload def __init__( @@ -393,6 +395,52 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class AzureKeyVaultS3WithHmacCredentials(Credentials, discriminator="AzureKeyVaultS3WithHMAC"): + """The Azure Key Vault secret URIs which store the credentials. + + :ivar access_key_uri: The Azure Key Vault secret URI which stores the username. Use empty + string to clean-up existing value. + :vartype access_key_uri: str + :ivar secret_key_uri: The Azure Key Vault secret URI which stores the password. Use empty + string to clean-up existing value. + :vartype secret_key_uri: str + :ivar type: The Credentials type. Required. AZURE_KEY_VAULT_S3_WITH_HMAC. + :vartype type: str or ~azure.mgmt.storagemover.models.AZURE_KEY_VAULT_S3_WITH_HMAC + """ + + access_key_uri: Optional[str] = rest_field( + name="accessKeyUri", visibility=["read", "create", "update", "delete", "query"] + ) + """The Azure Key Vault secret URI which stores the username. 
Use empty string to clean-up existing + value.""" + secret_key_uri: Optional[str] = rest_field( + name="secretKeyUri", visibility=["read", "create", "update", "delete", "query"] + ) + """The Azure Key Vault secret URI which stores the password. Use empty string to clean-up existing + value.""" + type: Literal[CredentialType.AZURE_KEY_VAULT_S3_WITH_HMAC] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The Credentials type. Required. AZURE_KEY_VAULT_S3_WITH_HMAC.""" + + @overload + def __init__( + self, + *, + access_key_uri: Optional[str] = None, + secret_key_uri: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.AZURE_KEY_VAULT_S3_WITH_HMAC # type: ignore + + class AzureKeyVaultSmbCredentials(Credentials, discriminator="AzureKeyVaultSmb"): """The Azure Key Vault secret URIs which store the credentials. @@ -402,7 +450,7 @@ class AzureKeyVaultSmbCredentials(Credentials, discriminator="AzureKeyVaultSmb") :ivar password_uri: The Azure Key Vault secret URI which stores the password. Use empty string to clean-up existing value. :vartype password_uri: str - :ivar type: The Credentials type. Required. + :ivar type: The Credentials type. Required. AZURE_KEY_VAULT_SMB. :vartype type: str or ~azure.mgmt.storagemover.models.AZURE_KEY_VAULT_SMB """ @@ -417,7 +465,7 @@ class AzureKeyVaultSmbCredentials(Credentials, discriminator="AzureKeyVaultSmb") """The Azure Key Vault secret URI which stores the password. 
Use empty string to clean-up existing value.""" type: Literal[CredentialType.AZURE_KEY_VAULT_SMB] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Credentials type. Required.""" + """The Credentials type. Required. AZURE_KEY_VAULT_SMB.""" @overload def __init__( @@ -435,7 +483,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, type=CredentialType.AZURE_KEY_VAULT_SMB, **kwargs) + super().__init__(*args, **kwargs) + self.type = CredentialType.AZURE_KEY_VAULT_SMB # type: ignore class EndpointBaseProperties(_Model): @@ -444,14 +493,17 @@ class EndpointBaseProperties(_Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: AzureMultiCloudConnectorEndpointProperties, AzureStorageBlobContainerEndpointProperties, AzureStorageNfsFileShareEndpointProperties, AzureStorageSmbFileShareEndpointProperties, - NfsMountEndpointProperties, SmbMountEndpointProperties + NfsMountEndpointProperties, S3WithHmacEndpointProperties, SmbMountEndpointProperties :ivar endpoint_type: The Endpoint resource type. Required. Known values are: "AzureStorageBlobContainer", "NfsMount", "AzureStorageSmbFileShare", "SmbMount", - "AzureMultiCloudConnector", and "AzureStorageNfsFileShare". + "AzureMultiCloudConnector", "AzureStorageNfsFileShare", and "S3WithHMAC". :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.EndpointType :ivar description: A description for the Endpoint. :vartype description: str + :ivar endpoint_kind: The Endpoint resource kind source or target. Known values are: "Source" + and "Target". + :vartype endpoint_kind: str or ~azure.mgmt.storagemover.models.EndpointKind :ivar provisioning_state: The provisioning state of this resource. Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". 
:vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState @@ -460,10 +512,14 @@ class EndpointBaseProperties(_Model): __mapping__: dict[str, _Model] = {} endpoint_type: str = rest_discriminator(name="endpointType", visibility=["read", "create"]) """The Endpoint resource type. Required. Known values are: \"AzureStorageBlobContainer\", - \"NfsMount\", \"AzureStorageSmbFileShare\", \"SmbMount\", \"AzureMultiCloudConnector\", and - \"AzureStorageNfsFileShare\".""" + \"NfsMount\", \"AzureStorageSmbFileShare\", \"SmbMount\", \"AzureMultiCloudConnector\", + \"AzureStorageNfsFileShare\", and \"S3WithHMAC\".""" description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A description for the Endpoint.""" + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = rest_field( + name="endpointKind", visibility=["read", "create"] + ) + """The Endpoint resource kind source or target. Known values are: \"Source\" and \"Target\".""" provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( name="provisioningState", visibility=["read"] ) @@ -476,6 +532,7 @@ def __init__( *, endpoint_type: str, description: Optional[str] = None, + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = None, ) -> None: ... @overload @@ -496,6 +553,9 @@ class AzureMultiCloudConnectorEndpointProperties( :ivar description: A description for the Endpoint. :vartype description: str + :ivar endpoint_kind: The Endpoint resource kind source or target. Known values are: "Source" + and "Target". + :vartype endpoint_kind: str or ~azure.mgmt.storagemover.models.EndpointKind :ivar provisioning_state: The provisioning state of this resource. Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". 
:vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState @@ -504,7 +564,7 @@ class AzureMultiCloudConnectorEndpointProperties( :vartype multi_cloud_connector_id: str :ivar aws_s3_bucket_id: The AWS S3 bucket ARM resource Id. Required. :vartype aws_s3_bucket_id: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. AZURE_MULTI_CLOUD_CONNECTOR. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.AZURE_MULTI_CLOUD_CONNECTOR """ @@ -513,7 +573,7 @@ class AzureMultiCloudConnectorEndpointProperties( aws_s3_bucket_id: str = rest_field(name="awsS3BucketId", visibility=["read", "create", "update", "delete", "query"]) """The AWS S3 bucket ARM resource Id. Required.""" endpoint_type: Literal[EndpointType.AZURE_MULTI_CLOUD_CONNECTOR] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. AZURE_MULTI_CLOUD_CONNECTOR.""" @overload def __init__( @@ -522,6 +582,7 @@ def __init__( multi_cloud_connector_id: str, aws_s3_bucket_id: str, description: Optional[str] = None, + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = None, ) -> None: ... 
@overload @@ -532,7 +593,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.AZURE_MULTI_CLOUD_CONNECTOR, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.AZURE_MULTI_CLOUD_CONNECTOR # type: ignore class EndpointBaseUpdateProperties(_Model): @@ -543,11 +605,11 @@ class EndpointBaseUpdateProperties(_Model): AzureStorageBlobContainerEndpointUpdateProperties, AzureStorageNfsFileShareEndpointUpdateProperties, AzureStorageSmbFileShareEndpointUpdateProperties, NfsMountEndpointUpdateProperties, - SmbMountEndpointUpdateProperties + S3WithHmacEndpointUpdateProperties, SmbMountEndpointUpdateProperties :ivar endpoint_type: The Endpoint resource type. Required. Known values are: "AzureStorageBlobContainer", "NfsMount", "AzureStorageSmbFileShare", "SmbMount", - "AzureMultiCloudConnector", and "AzureStorageNfsFileShare". + "AzureMultiCloudConnector", "AzureStorageNfsFileShare", and "S3WithHMAC". :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.EndpointType :ivar description: A description for the Endpoint. :vartype description: str @@ -556,8 +618,8 @@ class EndpointBaseUpdateProperties(_Model): __mapping__: dict[str, _Model] = {} endpoint_type: str = rest_discriminator(name="endpointType", visibility=["read", "create"]) """The Endpoint resource type. Required. 
Known values are: \"AzureStorageBlobContainer\", - \"NfsMount\", \"AzureStorageSmbFileShare\", \"SmbMount\", \"AzureMultiCloudConnector\", and - \"AzureStorageNfsFileShare\".""" + \"NfsMount\", \"AzureStorageSmbFileShare\", \"SmbMount\", \"AzureMultiCloudConnector\", + \"AzureStorageNfsFileShare\", and \"S3WithHMAC\".""" description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """A description for the Endpoint.""" @@ -587,12 +649,12 @@ class AzureMultiCloudConnectorEndpointUpdateProperties( :ivar description: A description for the Endpoint. :vartype description: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. AZURE_MULTI_CLOUD_CONNECTOR. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.AZURE_MULTI_CLOUD_CONNECTOR """ endpoint_type: Literal[EndpointType.AZURE_MULTI_CLOUD_CONNECTOR] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. AZURE_MULTI_CLOUD_CONNECTOR.""" @overload def __init__( @@ -609,7 +671,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.AZURE_MULTI_CLOUD_CONNECTOR, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.AZURE_MULTI_CLOUD_CONNECTOR # type: ignore class AzureStorageBlobContainerEndpointProperties( @@ -619,6 +682,9 @@ class AzureStorageBlobContainerEndpointProperties( :ivar description: A description for the Endpoint. :vartype description: str + :ivar endpoint_kind: The Endpoint resource kind source or target. Known values are: "Source" + and "Target". + :vartype endpoint_kind: str or ~azure.mgmt.storagemover.models.EndpointKind :ivar provisioning_state: The provisioning state of this resource. 
Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState @@ -628,7 +694,7 @@ class AzureStorageBlobContainerEndpointProperties( :ivar blob_container_name: The name of the Storage blob container that is the target destination. Required. :vartype blob_container_name: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. AZURE_STORAGE_BLOB_CONTAINER. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.AZURE_STORAGE_BLOB_CONTAINER """ @@ -637,7 +703,7 @@ class AzureStorageBlobContainerEndpointProperties( blob_container_name: str = rest_field(name="blobContainerName", visibility=["read", "create"]) """The name of the Storage blob container that is the target destination. Required.""" endpoint_type: Literal[EndpointType.AZURE_STORAGE_BLOB_CONTAINER] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. AZURE_STORAGE_BLOB_CONTAINER.""" @overload def __init__( @@ -646,6 +712,7 @@ def __init__( storage_account_resource_id: str, blob_container_name: str, description: Optional[str] = None, + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = None, ) -> None: ... @overload @@ -656,7 +723,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.AZURE_STORAGE_BLOB_CONTAINER, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.AZURE_STORAGE_BLOB_CONTAINER # type: ignore class AzureStorageBlobContainerEndpointUpdateProperties( @@ -666,12 +734,12 @@ class AzureStorageBlobContainerEndpointUpdateProperties( :ivar description: A description for the Endpoint. 
:vartype description: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. AZURE_STORAGE_BLOB_CONTAINER. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.AZURE_STORAGE_BLOB_CONTAINER """ endpoint_type: Literal[EndpointType.AZURE_STORAGE_BLOB_CONTAINER] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. AZURE_STORAGE_BLOB_CONTAINER.""" @overload def __init__( @@ -688,7 +756,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.AZURE_STORAGE_BLOB_CONTAINER, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.AZURE_STORAGE_BLOB_CONTAINER # type: ignore class AzureStorageNfsFileShareEndpointProperties( @@ -698,6 +767,9 @@ class AzureStorageNfsFileShareEndpointProperties( :ivar description: A description for the Endpoint. :vartype description: str + :ivar endpoint_kind: The Endpoint resource kind source or target. Known values are: "Source" + and "Target". + :vartype endpoint_kind: str or ~azure.mgmt.storagemover.models.EndpointKind :ivar provisioning_state: The provisioning state of this resource. Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState @@ -705,7 +777,7 @@ class AzureStorageNfsFileShareEndpointProperties( :vartype storage_account_resource_id: str :ivar file_share_name: The name of the Azure Storage NFS file share. Required. :vartype file_share_name: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. AZURE_STORAGE_NFS_FILE_SHARE. 
:vartype endpoint_type: str or ~azure.mgmt.storagemover.models.AZURE_STORAGE_NFS_FILE_SHARE """ @@ -714,7 +786,7 @@ class AzureStorageNfsFileShareEndpointProperties( file_share_name: str = rest_field(name="fileShareName", visibility=["read", "create"]) """The name of the Azure Storage NFS file share. Required.""" endpoint_type: Literal[EndpointType.AZURE_STORAGE_NFS_FILE_SHARE] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. AZURE_STORAGE_NFS_FILE_SHARE.""" @overload def __init__( @@ -723,6 +795,7 @@ def __init__( storage_account_resource_id: str, file_share_name: str, description: Optional[str] = None, + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = None, ) -> None: ... @overload @@ -733,7 +806,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.AZURE_STORAGE_NFS_FILE_SHARE, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.AZURE_STORAGE_NFS_FILE_SHARE # type: ignore class AzureStorageNfsFileShareEndpointUpdateProperties( @@ -743,12 +817,12 @@ class AzureStorageNfsFileShareEndpointUpdateProperties( :ivar description: A description for the Endpoint. :vartype description: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. AZURE_STORAGE_NFS_FILE_SHARE. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.AZURE_STORAGE_NFS_FILE_SHARE """ endpoint_type: Literal[EndpointType.AZURE_STORAGE_NFS_FILE_SHARE] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. 
AZURE_STORAGE_NFS_FILE_SHARE.""" @overload def __init__( @@ -765,7 +839,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.AZURE_STORAGE_NFS_FILE_SHARE, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.AZURE_STORAGE_NFS_FILE_SHARE # type: ignore class AzureStorageSmbFileShareEndpointProperties( @@ -775,6 +850,9 @@ class AzureStorageSmbFileShareEndpointProperties( :ivar description: A description for the Endpoint. :vartype description: str + :ivar endpoint_kind: The Endpoint resource kind source or target. Known values are: "Source" + and "Target". + :vartype endpoint_kind: str or ~azure.mgmt.storagemover.models.EndpointKind :ivar provisioning_state: The provisioning state of this resource. Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState @@ -782,7 +860,7 @@ class AzureStorageSmbFileShareEndpointProperties( :vartype storage_account_resource_id: str :ivar file_share_name: The name of the Azure Storage file share. Required. :vartype file_share_name: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. AZURE_STORAGE_SMB_FILE_SHARE. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.AZURE_STORAGE_SMB_FILE_SHARE """ @@ -791,7 +869,7 @@ class AzureStorageSmbFileShareEndpointProperties( file_share_name: str = rest_field(name="fileShareName", visibility=["read", "create"]) """The name of the Azure Storage file share. Required.""" endpoint_type: Literal[EndpointType.AZURE_STORAGE_SMB_FILE_SHARE] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. 
AZURE_STORAGE_SMB_FILE_SHARE.""" @overload def __init__( @@ -800,6 +878,7 @@ def __init__( storage_account_resource_id: str, file_share_name: str, description: Optional[str] = None, + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = None, ) -> None: ... @overload @@ -810,7 +889,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.AZURE_STORAGE_SMB_FILE_SHARE, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.AZURE_STORAGE_SMB_FILE_SHARE # type: ignore class AzureStorageSmbFileShareEndpointUpdateProperties( @@ -820,18 +900,124 @@ class AzureStorageSmbFileShareEndpointUpdateProperties( :ivar description: A description for the Endpoint. :vartype description: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. AZURE_STORAGE_SMB_FILE_SHARE. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.AZURE_STORAGE_SMB_FILE_SHARE """ endpoint_type: Literal[EndpointType.AZURE_STORAGE_SMB_FILE_SHARE] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. AZURE_STORAGE_SMB_FILE_SHARE.""" + + @overload + def __init__( + self, + *, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.AZURE_STORAGE_SMB_FILE_SHARE # type: ignore + + +class Connection(ProxyResource): + """The Connection resource. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.storagemover.models.SystemData + :ivar properties: Connection properties. Required. + :vartype properties: ~azure.mgmt.storagemover.models.ConnectionProperties + """ + + properties: "_models.ConnectionProperties" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Connection properties. Required.""" @overload def __init__( self, *, + properties: "_models.ConnectionProperties", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ConnectionProperties(_Model): + """Properties of the Connection resource. + + :ivar description: A description for the Connection. + :vartype description: str + :ivar connection_status: The connection status. Known values are: "Approved", "Rejected", + "Disconnected", "Pending", and "Stale". + :vartype connection_status: str or ~azure.mgmt.storagemover.models.ConnectionStatus + :ivar private_link_service_id: The PrivateLinkServiceId for the connection. Required. + :vartype private_link_service_id: str + :ivar private_endpoint_name: The PrivateEndpointName associated with the connection. + :vartype private_endpoint_name: str + :ivar private_endpoint_resource_id: The privateEndpoint resource Id. 
+ :vartype private_endpoint_resource_id: str + :ivar job_list: List of job definitions associated with this connection. + :vartype job_list: list[str] + :ivar provisioning_state: The provisioning state of this resource. Known values are: + "Succeeded", "Canceled", "Failed", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState + """ + + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description for the Connection.""" + connection_status: Optional[Union[str, "_models.ConnectionStatus"]] = rest_field( + name="connectionStatus", visibility=["read"] + ) + """The connection status. Known values are: \"Approved\", \"Rejected\", \"Disconnected\", + \"Pending\", and \"Stale\".""" + private_link_service_id: str = rest_field( + name="privateLinkServiceId", visibility=["read", "create", "update", "delete", "query"] + ) + """The PrivateLinkServiceId for the connection. Required.""" + private_endpoint_name: Optional[str] = rest_field(name="privateEndpointName", visibility=["read"]) + """The PrivateEndpointName associated with the connection.""" + private_endpoint_resource_id: Optional[str] = rest_field(name="privateEndpointResourceId", visibility=["read"]) + """The privateEndpoint resource Id.""" + job_list: Optional[list[str]] = rest_field( + name="jobList", visibility=["read", "create", "update", "delete", "query"] + ) + """List of job definitions associated with this connection.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The provisioning state of this resource. Known values are: \"Succeeded\", \"Canceled\", + \"Failed\", and \"Deleting\".""" + + @overload + def __init__( + self, + *, + private_link_service_id: str, description: Optional[str] = None, + job_list: Optional[list[str]] = None, ) -> None: ... 
@overload @@ -842,7 +1028,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.AZURE_STORAGE_SMB_FILE_SHARE, **kwargs) + super().__init__(*args, **kwargs) class Endpoint(ProxyResource): @@ -1045,6 +1231,10 @@ class JobDefinition(ProxyResource): "agent_resource_id", "source_target_map", "provisioning_state", + "connections", + "schedule", + "data_integrity_validation", + "preserve_permissions", ] @overload @@ -1125,6 +1315,16 @@ class JobDefinitionProperties(_Model): :ivar provisioning_state: The provisioning state of this resource. Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState + :ivar connections: List of connections associated to this job. + :vartype connections: list[str] + :ivar schedule: Schedule information for the Job Definition. + :vartype schedule: ~azure.mgmt.storagemover.models.ScheduleInfo + :ivar data_integrity_validation: The checksum validation mode for the job definition. Known + values are: "SaveVerifyFileMD5", "SaveFileMD5", and "None". + :vartype data_integrity_validation: str or + ~azure.mgmt.storagemover.models.DataIntegrityValidation + :ivar preserve_permissions: Boolean to preserve permissions or not. + :vartype preserve_permissions: bool """ description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -1173,6 +1373,19 @@ class JobDefinitionProperties(_Model): ) """The provisioning state of this resource. 
Known values are: \"Succeeded\", \"Canceled\", \"Failed\", and \"Deleting\".""" + connections: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of connections associated to this job.""" + schedule: Optional["_models.ScheduleInfo"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Schedule information for the Job Definition.""" + data_integrity_validation: Optional[Union[str, "_models.DataIntegrityValidation"]] = rest_field( + name="dataIntegrityValidation", visibility=["read", "create", "update", "delete", "query"] + ) + """The checksum validation mode for the job definition. Known values are: \"SaveVerifyFileMD5\", + \"SaveFileMD5\", and \"None\".""" + preserve_permissions: Optional[bool] = rest_field( + name="preservePermissions", visibility=["read", "create", "update", "delete", "query"] + ) + """Boolean to preserve permissions or not.""" @overload def __init__( @@ -1187,6 +1400,10 @@ def __init__( target_subpath: Optional[str] = None, agent_name: Optional[str] = None, source_target_map: Optional["_models.JobDefinitionPropertiesSourceTargetMap"] = None, + connections: Optional[list[str]] = None, + schedule: Optional["_models.ScheduleInfo"] = None, + data_integrity_validation: Optional[Union[str, "_models.DataIntegrityValidation"]] = None, + preserve_permissions: Optional[bool] = None, ) -> None: ... @overload @@ -1222,7 +1439,7 @@ class JobDefinitionUpdateParameters(_Model): ) """Job definition properties.""" - __flattened_items = ["description", "copy_mode", "agent_name"] + __flattened_items = ["description", "copy_mode", "agent_name", "connections", "data_integrity_validation"] @overload def __init__( @@ -1269,6 +1486,12 @@ class JobDefinitionUpdateProperties(_Model): :vartype copy_mode: str or ~azure.mgmt.storagemover.models.CopyMode :ivar agent_name: Name of the Agent to assign for new Job Runs of this Job Definition. 
:vartype agent_name: str + :ivar connections: List of connections associated to this job. + :vartype connections: list[str] + :ivar data_integrity_validation: Data Integrity Validation mode. Known values are: + "SaveVerifyFileMD5", "SaveFileMD5", and "None". + :vartype data_integrity_validation: str or + ~azure.mgmt.storagemover.models.DataIntegrityValidation """ description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -1279,6 +1502,13 @@ class JobDefinitionUpdateProperties(_Model): """Strategy to use for copy. Known values are: \"Additive\" and \"Mirror\".""" agent_name: Optional[str] = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) """Name of the Agent to assign for new Job Runs of this Job Definition.""" + connections: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of connections associated to this job.""" + data_integrity_validation: Optional[Union[str, "_models.DataIntegrityValidation"]] = rest_field( + name="dataIntegrityValidation", visibility=["read", "create", "update", "delete", "query"] + ) + """Data Integrity Validation mode. Known values are: \"SaveVerifyFileMD5\", \"SaveFileMD5\", and + \"None\".""" @overload def __init__( @@ -1287,6 +1517,8 @@ def __init__( description: Optional[str] = None, copy_mode: Optional[Union[str, "_models.CopyMode"]] = None, agent_name: Optional[str] = None, + connections: Optional[list[str]] = None, + data_integrity_validation: Optional[Union[str, "_models.DataIntegrityValidation"]] = None, ) -> None: ... 
@overload @@ -1330,6 +1562,8 @@ class JobRun(ProxyResource): "agent_resource_id", "execution_start_time", "execution_end_time", + "trigger_type", + "scheduled_execution_time", "last_status_update", "items_scanned", "items_excluded", @@ -1351,6 +1585,7 @@ class JobRun(ProxyResource): "target_properties", "job_definition_properties", "error", + "warnings", "provisioning_state", ] @@ -1448,6 +1683,11 @@ class JobRunProperties(_Model): :ivar execution_end_time: End time of the run. Null if Agent has not reported that the job has ended. :vartype execution_end_time: ~datetime.datetime + :ivar trigger_type: Trigger type for the job run. Default is manual. Known values are: "Manual" + and "Scheduled". + :vartype trigger_type: str or ~azure.mgmt.storagemover.models.TriggerType + :ivar scheduled_execution_time: Scheduled execution time. Null if Trigger type is manual. + :vartype scheduled_execution_time: ~datetime.datetime :ivar last_status_update: The last updated time of the Job Run. :vartype last_status_update: ~datetime.datetime :ivar items_scanned: Number of items scanned so far in source. @@ -1500,6 +1740,8 @@ class JobRunProperties(_Model): :vartype job_definition_properties: any :ivar error: Error details. :vartype error: ~azure.mgmt.storagemover.models.JobRunError + :ivar warnings: Warning details. + :vartype warnings: list[~azure.mgmt.storagemover.models.JobRunWarning] :ivar provisioning_state: The provisioning state of this resource. Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState @@ -1524,6 +1766,13 @@ class JobRunProperties(_Model): name="executionEndTime", visibility=["read"], format="rfc3339" ) """End time of the run. Null if Agent has not reported that the job has ended.""" + trigger_type: Optional[Union[str, "_models.TriggerType"]] = rest_field(name="triggerType", visibility=["read"]) + """Trigger type for the job run. Default is manual. 
Known values are: \"Manual\" and + \"Scheduled\".""" + scheduled_execution_time: Optional[datetime.datetime] = rest_field( + name="scheduledExecutionTime", visibility=["read"], format="rfc3339" + ) + """Scheduled execution time. Null if Trigger type is manual.""" last_status_update: Optional[datetime.datetime] = rest_field( name="lastStatusUpdate", visibility=["read"], format="rfc3339" ) @@ -1570,6 +1819,8 @@ class JobRunProperties(_Model): """Copy of parent Job Definition's properties at time of Job Run creation.""" error: Optional["_models.JobRunError"] = rest_field(visibility=["read"]) """Error details.""" + warnings: Optional[list["_models.JobRunWarning"]] = rest_field(visibility=["read"]) + """Warning details.""" provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( name="provisioningState", visibility=["read"] ) @@ -1588,6 +1839,44 @@ class JobRunResourceId(_Model): """Fully qualified resource id of the Job Run.""" +class JobRunWarning(_Model): + """Warning type. + + :ivar code: Error code of the given entry. + :vartype code: str + :ivar message: Warning message of the given entry. + :vartype message: str + :ivar target: Target of the given error entry. + :vartype target: str + """ + + code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Error code of the given entry.""" + message: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Warning message of the given entry.""" + target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Target of the given error entry.""" + + @overload + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class ManagedServiceIdentity(_Model): """Managed service identity (system assigned and/or user assigned identities). @@ -1645,6 +1934,9 @@ class NfsMountEndpointProperties(EndpointBaseProperties, discriminator="NfsMount :ivar description: A description for the Endpoint. :vartype description: str + :ivar endpoint_kind: The Endpoint resource kind source or target. Known values are: "Source" + and "Target". + :vartype endpoint_kind: str or ~azure.mgmt.storagemover.models.EndpointKind :ivar provisioning_state: The provisioning state of this resource. Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState @@ -1654,7 +1946,7 @@ class NfsMountEndpointProperties(EndpointBaseProperties, discriminator="NfsMount :vartype nfs_version: str or ~azure.mgmt.storagemover.models.NfsVersion :ivar export: The directory being exported from the server. Required. :vartype export: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. NFS_MOUNT. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.NFS_MOUNT """ @@ -1667,7 +1959,7 @@ class NfsMountEndpointProperties(EndpointBaseProperties, discriminator="NfsMount export: str = rest_field(visibility=["read", "create"]) """The directory being exported from the server. Required.""" endpoint_type: Literal[EndpointType.NFS_MOUNT] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. 
NFS_MOUNT.""" @overload def __init__( @@ -1676,6 +1968,7 @@ def __init__( host: str, export: str, description: Optional[str] = None, + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = None, nfs_version: Optional[Union[str, "_models.NfsVersion"]] = None, ) -> None: ... @@ -1687,7 +1980,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.NFS_MOUNT, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.NFS_MOUNT # type: ignore class NfsMountEndpointUpdateProperties(EndpointBaseUpdateProperties, discriminator="NfsMount"): @@ -1695,12 +1989,12 @@ class NfsMountEndpointUpdateProperties(EndpointBaseUpdateProperties, discriminat :ivar description: A description for the Endpoint. :vartype description: str - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. NFS_MOUNT. :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.NFS_MOUNT """ endpoint_type: Literal[EndpointType.NFS_MOUNT] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. NFS_MOUNT.""" @overload def __init__( @@ -1717,7 +2011,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.NFS_MOUNT, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.NFS_MOUNT # type: ignore class Operation(_Model): @@ -1778,7 +2073,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class OperationDisplay(_Model): - """Localized display information for and operation. + """Localized display information for an operation. :ivar provider: The localized friendly form of the resource provider name, e.g. 
"Microsoft Monitoring Insights" or "Microsoft Compute". @@ -2022,11 +2317,197 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) +class S3WithHmacEndpointProperties(EndpointBaseProperties, discriminator="S3WithHMAC"): + """The properties of S3WithHmac share endpoint. + + :ivar description: A description for the Endpoint. + :vartype description: str + :ivar endpoint_kind: The Endpoint resource kind source or target. Known values are: "Source" + and "Target". + :vartype endpoint_kind: str or ~azure.mgmt.storagemover.models.EndpointKind + :ivar provisioning_state: The provisioning state of this resource. Known values are: + "Succeeded", "Canceled", "Failed", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState + :ivar credentials: The Azure Key Vault credentials which stores the access key and secret key. + Use empty string to clean-up existing value. + :vartype credentials: ~azure.mgmt.storagemover.models.AzureKeyVaultS3WithHmacCredentials + :ivar source_uri: The URI which points to the source. + :vartype source_uri: str + :ivar source_type: The source type of S3WithHmac endpoint. Known values are: "MINIO", + "BACKBLAZE", "IBM", "CLOUDFLARE", and "GCS". + :vartype source_type: str or ~azure.mgmt.storagemover.models.S3WithHmacSourceType + :ivar other_source_type_description: The description for other source type of S3WithHmac + endpoint. + :vartype other_source_type_description: str + :ivar endpoint_type: The Endpoint resource type. Required. S3_WITH_HMAC. + :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.S3_WITH_HMAC + """ + + credentials: Optional["_models.AzureKeyVaultS3WithHmacCredentials"] = rest_field(visibility=["read", "create"]) + """The Azure Key Vault credentials which stores the access key and secret key. 
Use empty string to + clean-up existing value.""" + source_uri: Optional[str] = rest_field(name="sourceUri", visibility=["read", "create"]) + """The URI which points to the source.""" + source_type: Optional[Union[str, "_models.S3WithHmacSourceType"]] = rest_field( + name="sourceType", visibility=["read", "create"] + ) + """The source type of S3WithHmac endpoint. Known values are: \"MINIO\", \"BACKBLAZE\", \"IBM\", + \"CLOUDFLARE\", and \"GCS\".""" + other_source_type_description: Optional[str] = rest_field( + name="otherSourceTypeDescription", visibility=["read", "create"] + ) + """The description for other source type of S3WithHmac endpoint.""" + endpoint_type: Literal[EndpointType.S3_WITH_HMAC] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The Endpoint resource type. Required. S3_WITH_HMAC.""" + + @overload + def __init__( + self, + *, + description: Optional[str] = None, + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = None, + credentials: Optional["_models.AzureKeyVaultS3WithHmacCredentials"] = None, + source_uri: Optional[str] = None, + source_type: Optional[Union[str, "_models.S3WithHmacSourceType"]] = None, + other_source_type_description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.S3_WITH_HMAC # type: ignore + + +class S3WithHmacEndpointUpdateProperties(EndpointBaseUpdateProperties, discriminator="S3WithHMAC"): + """S3WithHmacEndpointUpdateProperties. + + :ivar description: A description for the Endpoint. + :vartype description: str + :ivar credentials: The Azure Key Vault secret URIs which store the required credentials to + access the S3. 
+ :vartype credentials: ~azure.mgmt.storagemover.models.AzureKeyVaultS3WithHmacCredentials + :ivar endpoint_type: The Endpoint resource type. Required. S3_WITH_HMAC. + :vartype endpoint_type: str or ~azure.mgmt.storagemover.models.S3_WITH_HMAC + """ + + credentials: Optional["_models.AzureKeyVaultS3WithHmacCredentials"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The Azure Key Vault secret URIs which store the required credentials to access the S3.""" + endpoint_type: Literal[EndpointType.S3_WITH_HMAC] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The Endpoint resource type. Required. S3_WITH_HMAC.""" + + @overload + def __init__( + self, + *, + description: Optional[str] = None, + credentials: Optional["_models.AzureKeyVaultS3WithHmacCredentials"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.S3_WITH_HMAC # type: ignore + + +class ScheduleInfo(_Model): + """Schedule information for the Job Definition. + + :ivar frequency: Type of schedule — Monthly, Weekly, or Daily. Required. Known values are: + "Monthly", "Weekly", "Daily", and "Onetime". + :vartype frequency: str or ~azure.mgmt.storagemover.models.Frequency + :ivar is_active: Whether the schedule is currently active. Required. + :vartype is_active: bool + :ivar execution_time: Time of day to execute (hours and minutes). + :vartype execution_time: ~azure.mgmt.storagemover.models.Time + :ivar start_date: Specific one-time execution date and time. + :vartype start_date: ~datetime.datetime + :ivar days_of_week: Days of the week for weekly schedules. 
+ :vartype days_of_week: list[str] + :ivar days_of_month: Days of the month for monthly schedules. + :vartype days_of_month: list[int] + :ivar cron_expression: Optional CRON expression for advanced scheduling. + :vartype cron_expression: str + :ivar end_date: End time of the schedule (in UTC). + :vartype end_date: ~datetime.datetime + """ + + frequency: Union[str, "_models.Frequency"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Type of schedule — Monthly, Weekly, or Daily. Required. Known values are: \"Monthly\", + \"Weekly\", \"Daily\", and \"Onetime\".""" + is_active: bool = rest_field(name="isActive", visibility=["read", "create", "update", "delete", "query"]) + """Whether the schedule is currently active. Required.""" + execution_time: Optional["_models.Time"] = rest_field( + name="executionTime", visibility=["read", "create", "update", "delete", "query"] + ) + """Time of day to execute (hours and minutes).""" + start_date: Optional[datetime.datetime] = rest_field( + name="startDate", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """Specific one-time execution date and time.""" + days_of_week: Optional[list[str]] = rest_field( + name="daysOfWeek", visibility=["read", "create", "update", "delete", "query"] + ) + """Days of the week for weekly schedules.""" + days_of_month: Optional[list[int]] = rest_field( + name="daysOfMonth", visibility=["read", "create", "update", "delete", "query"] + ) + """Days of the month for monthly schedules.""" + cron_expression: Optional[str] = rest_field( + name="cronExpression", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional CRON expression for advanced scheduling.""" + end_date: Optional[datetime.datetime] = rest_field( + name="endDate", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """End time of the schedule (in UTC).""" + + @overload + def __init__( + self, + *, + frequency: Union[str, 
"_models.Frequency"], + is_active: bool, + execution_time: Optional["_models.Time"] = None, + start_date: Optional[datetime.datetime] = None, + days_of_week: Optional[list[str]] = None, + days_of_month: Optional[list[int]] = None, + cron_expression: Optional[str] = None, + end_date: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + class SmbMountEndpointProperties(EndpointBaseProperties, discriminator="SmbMount"): """The properties of SMB share endpoint. :ivar description: A description for the Endpoint. :vartype description: str + :ivar endpoint_kind: The Endpoint resource kind source or target. Known values are: "Source" + and "Target". + :vartype endpoint_kind: str or ~azure.mgmt.storagemover.models.EndpointKind :ivar provisioning_state: The provisioning state of this resource. Known values are: "Succeeded", "Canceled", "Failed", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.storagemover.models.ProvisioningState @@ -2037,7 +2518,7 @@ class SmbMountEndpointProperties(EndpointBaseProperties, discriminator="SmbMount :ivar credentials: The Azure Key Vault secret URIs which store the required credentials to access the SMB share. :vartype credentials: ~azure.mgmt.storagemover.models.AzureKeyVaultSmbCredentials - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. SMB_MOUNT. 
:vartype endpoint_type: str or ~azure.mgmt.storagemover.models.SMB_MOUNT """ @@ -2050,7 +2531,7 @@ class SmbMountEndpointProperties(EndpointBaseProperties, discriminator="SmbMount ) """The Azure Key Vault secret URIs which store the required credentials to access the SMB share.""" endpoint_type: Literal[EndpointType.SMB_MOUNT] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. SMB_MOUNT.""" @overload def __init__( @@ -2059,6 +2540,7 @@ def __init__( host: str, share_name: str, description: Optional[str] = None, + endpoint_kind: Optional[Union[str, "_models.EndpointKind"]] = None, credentials: Optional["_models.AzureKeyVaultSmbCredentials"] = None, ) -> None: ... @@ -2070,7 +2552,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.SMB_MOUNT, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.SMB_MOUNT # type: ignore class SmbMountEndpointUpdateProperties(EndpointBaseUpdateProperties, discriminator="SmbMount"): @@ -2081,7 +2564,7 @@ class SmbMountEndpointUpdateProperties(EndpointBaseUpdateProperties, discriminat :ivar credentials: The Azure Key Vault secret URIs which store the required credentials to access the SMB share. :vartype credentials: ~azure.mgmt.storagemover.models.AzureKeyVaultSmbCredentials - :ivar endpoint_type: The Endpoint resource type. Required. + :ivar endpoint_type: The Endpoint resource type. Required. SMB_MOUNT. 
:vartype endpoint_type: str or ~azure.mgmt.storagemover.models.SMB_MOUNT """ @@ -2090,7 +2573,7 @@ class SmbMountEndpointUpdateProperties(EndpointBaseUpdateProperties, discriminat ) """The Azure Key Vault secret URIs which store the required credentials to access the SMB share.""" endpoint_type: Literal[EndpointType.SMB_MOUNT] = rest_discriminator(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore - """The Endpoint resource type. Required.""" + """The Endpoint resource type. Required. SMB_MOUNT.""" @overload def __init__( @@ -2108,7 +2591,8 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, endpoint_type=EndpointType.SMB_MOUNT, **kwargs) + super().__init__(*args, **kwargs) + self.endpoint_type = EndpointType.SMB_MOUNT # type: ignore class SourceEndpoint(_Model): diff --git a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/operations/__init__.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/operations/__init__.py index 5f500539f0f7..53e1d7223645 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/operations/__init__.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/operations/__init__.py @@ -18,6 +18,7 @@ from ._operations import EndpointsOperations # type: ignore from ._operations import ProjectsOperations # type: ignore from ._operations import JobDefinitionsOperations # type: ignore +from ._operations import ConnectionsOperations # type: ignore from ._operations import JobRunsOperations # type: ignore from ._patch import __all__ as _patch_all @@ -31,6 +32,7 @@ "EndpointsOperations", "ProjectsOperations", "JobDefinitionsOperations", + "ConnectionsOperations", "JobRunsOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git 
a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/operations/_operations.py b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/operations/_operations.py index 8fedd860b573..860e2384b364 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/operations/_operations.py +++ b/sdk/storagemover/azure-mgmt-storagemover/azure/mgmt/storagemover/operations/_operations.py @@ -36,11 +36,12 @@ from .._configuration import StorageMoverMgmtClientConfiguration from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize from .._utils.serialization import Deserializer, Serializer +from .._validation import api_version_validation -List = list T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] JSON = MutableMapping[str, Any] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -50,7 +51,7 @@ def build_operations_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -71,7 +72,7 @@ def build_storage_movers_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -100,7 +101,7 @@ def build_storage_movers_create_or_update_request( # pylint: disable=name-too-l _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) 
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -131,7 +132,7 @@ def build_storage_movers_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -160,7 +161,7 @@ def build_storage_movers_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) # Construct URL _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}" path_format_arguments = { @@ -181,7 +182,7 @@ def build_storage_movers_list_request(resource_group_name: str, subscription_id: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -208,7 +209,7 @@ def build_storage_movers_list_by_subscription_request( # pylint: disable=name-t _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -234,7 +235,7 @@ def build_agents_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -264,7 +265,7 @@ def build_agents_create_or_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -296,7 +297,7 @@ def build_agents_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -326,7 +327,7 @@ def build_agents_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) # Construct URL _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/agents/{agentName}" path_format_arguments = { @@ -350,7 +351,7 @@ def build_agents_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -378,7 +379,7 @@ def build_endpoints_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -408,7 +409,7 @@ def build_endpoints_create_or_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -440,7 +441,7 @@ def build_endpoints_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -470,7 +471,7 @@ def 
build_endpoints_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) # Construct URL _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/endpoints/{endpointName}" path_format_arguments = { @@ -494,7 +495,7 @@ def build_endpoints_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -522,7 +523,7 @@ def build_projects_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -552,7 +553,7 @@ def build_projects_create_or_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -584,7 +585,7 @@ def build_projects_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -614,7 +615,7 @@ def build_projects_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) # Construct URL _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/projects/{projectName}" path_format_arguments = { @@ -638,7 +639,7 @@ def build_projects_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -671,7 +672,7 @@ def build_job_definitions_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -707,7 +708,7 @@ def build_job_definitions_create_or_update_request( # pylint: disable=name-too- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -745,7 +746,7 @@ def build_job_definitions_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -781,7 +782,7 @@ def build_job_definitions_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) # Construct URL _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/projects/{projectName}/jobDefinitions/{jobDefinitionName}" path_format_arguments = { @@ -806,7 +807,7 @@ def build_job_definitions_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -840,7 +841,7 @@ def build_job_definitions_start_job_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -875,7 +876,7 @@ def build_job_definitions_stop_job_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -899,6 +900,118 @@ def build_job_definitions_stop_job_request( return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) +def build_connections_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, storage_mover_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/connections/{connectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageMoverName": _SERIALIZER.url("storage_mover_name", storage_mover_name, "str"), + "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connections_get_request( + resource_group_name: str, storage_mover_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/connections/{connectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageMoverName": _SERIALIZER.url("storage_mover_name", storage_mover_name, "str"), + "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connections_list_request( + resource_group_name: str, storage_mover_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or 
{}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/connections" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageMoverName": _SERIALIZER.url("storage_mover_name", storage_mover_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_connections_delete_request( + resource_group_name: str, storage_mover_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/connections/{connectionName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "storageMoverName": _SERIALIZER.url("storage_mover_name", storage_mover_name, "str"), + "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + def build_job_runs_get_request( resource_group_name: str, storage_mover_name: str, @@ -911,7 +1024,7 @@ def build_job_runs_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -947,7 +1060,7 @@ def build_job_runs_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-12-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1048,7 +1161,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Operation], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -1064,7 +1180,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -1128,6 +1247,7 @@ def get(self, 
resource_group_name: str, storage_mover_name: str, **kwargs: Any) } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1142,11 +1262,14 @@ def get(self, resource_group_name: str, storage_mover_name: str, **kwargs: Any) except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.StorageMover, response.json()) @@ -1293,6 +1416,7 @@ def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1307,11 +1431,14 @@ def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.StorageMover, response.json()) @@ 
-1464,6 +1591,7 @@ def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1478,11 +1606,14 @@ def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.StorageMover, response.json()) @@ -1518,6 +1649,7 @@ def _delete_initial(self, resource_group_name: str, storage_mover_name: str, **k } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1531,7 +1663,10 @@ def _delete_initial(self, resource_group_name: str, storage_mover_name: str, **k except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -1539,7 +1674,7 @@ def _delete_initial(self, resource_group_name: str, storage_mover_name: str, **k response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) 
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1668,7 +1803,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.StorageMover], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.StorageMover], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -1684,7 +1822,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -1752,7 +1893,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.StorageMover], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.StorageMover], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -1768,7 +1912,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) return pipeline_response @@ -1835,6 +1982,7 @@ def get(self, resource_group_name: str, storage_mover_name: str, agent_name: str } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -1849,11 +1997,14 @@ def get(self, resource_group_name: str, storage_mover_name: str, agent_name: str except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Agent, response.json()) @@ -2017,6 +2168,7 @@ def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2031,11 +2183,14 @@ def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else 
response.iter_raw() else: deserialized = _deserialize(_models.Agent, response.json()) @@ -2195,6 +2350,7 @@ def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2209,11 +2365,14 @@ def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Agent, response.json()) @@ -2252,6 +2411,7 @@ def _delete_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2265,7 +2425,10 @@ def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -2273,7 +2436,7 @@ def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - 
deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2410,7 +2573,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Agent], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.Agent], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -2426,7 +2592,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -2495,6 +2664,7 @@ def get( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2509,11 +2679,14 @@ def get( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Endpoint, response.json()) @@ 
-2677,6 +2850,7 @@ def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2691,11 +2865,14 @@ def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Endpoint, response.json()) @@ -2861,6 +3038,7 @@ def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2875,11 +3053,14 @@ def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Endpoint, response.json()) @@ -2918,6 +3099,7 @@ def _delete_initial( } _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -2931,7 +3113,10 @@ def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -2939,7 +3124,7 @@ def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3076,7 +3261,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Endpoint], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.Endpoint], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -3092,7 +3280,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -3161,6 +3352,7 @@ def get( } 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3175,11 +3367,14 @@ def get( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Project, response.json()) @@ -3339,6 +3534,7 @@ def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3353,11 +3549,14 @@ def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Project, response.json()) @@ -3522,6 +3721,7 @@ def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", 
True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3536,11 +3736,14 @@ def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.Project, response.json()) @@ -3579,6 +3782,7 @@ def _delete_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3592,7 +3796,10 @@ def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -3600,7 +3807,7 @@ def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -3737,7 +3944,10 @@ def 
prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Project], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.Project], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -3753,7 +3963,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -3830,6 +4043,7 @@ def get( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -3844,11 +4058,14 @@ def get( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobDefinition, response.json()) @@ -4025,6 +4242,7 @@ def create_or_update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -4039,11 +4257,14 @@ def create_or_update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobDefinition, response.json()) @@ -4222,6 +4443,7 @@ def update( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -4236,11 +4458,14 @@ def update( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobDefinition, response.json()) @@ -4285,6 +4510,7 @@ def _delete_initial( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, 
**kwargs @@ -4298,7 +4524,10 @@ def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} @@ -4306,7 +4535,7 @@ def _delete_initial( response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -4456,7 +4685,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.JobDefinition], deserialized.get("value", [])) + list_of_elem = _deserialize( + List[_models.JobDefinition], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -4472,7 +4704,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -4532,6 +4767,7 @@ def start_job( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -4546,11 +4782,14 @@ def start_job( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobRunResourceId, response.json()) @@ -4611,6 +4850,7 @@ def stop_job( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -4625,11 +4865,14 @@ def stop_job( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: deserialized = _deserialize(_models.JobRunResourceId, response.json()) @@ -4639,14 +4882,14 @@ def stop_job( return deserialized # type: ignore -class JobRunsOperations: +class ConnectionsOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.storagemover.StorageMoverMgmtClient`'s - :attr:`job_runs` attribute. 
+ :attr:`connections` attribute. """ def __init__(self, *args, **kwargs) -> None: @@ -4656,31 +4899,133 @@ def __init__(self, *args, **kwargs) -> None: self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @overload + def create_or_update( + self, + resource_group_name: str, + storage_mover_name: str, + connection_name: str, + connection: _models.Connection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Connection: + """Creates or updates a Connection resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :param connection: Required. + :type connection: ~azure.mgmt.storagemover.models.Connection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + storage_mover_name: str, + connection_name: str, + connection: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Connection: + """Creates or updates a Connection resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. 
+ :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :param connection: Required. + :type connection: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + storage_mover_name: str, + connection_name: str, + connection: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Connection: + """Creates or updates a Connection resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :param connection: Required. + :type connection: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: Connection. 
The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + @distributed_trace - def get( + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "storage_mover_name", + "connection_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + def create_or_update( self, resource_group_name: str, storage_mover_name: str, - project_name: str, - job_definition_name: str, - job_run_name: str, + connection_name: str, + connection: Union[_models.Connection, JSON, IO[bytes]], **kwargs: Any - ) -> _models.JobRun: - """Gets a Job Run resource. + ) -> _models.Connection: + """Creates or updates a Connection resource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param storage_mover_name: The name of the Storage Mover resource. Required. :type storage_mover_name: str - :param project_name: The name of the Project resource. Required. - :type project_name: str - :param job_definition_name: The name of the Job Definition resource. Required. - :type job_definition_name: str - :param job_run_name: The name of the Job Run resource. Required. - :type job_run_name: str - :return: JobRun. The JobRun is compatible with MutableMapping - :rtype: ~azure.mgmt.storagemover.models.JobRun + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :param connection: Is one of the following types: Connection, JSON, IO[bytes] Required. + :type connection: ~azure.mgmt.storagemover.models.Connection or JSON or IO[bytes] + :return: Connection. 
The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4691,19 +5036,27 @@ def get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.JobRun] = kwargs.pop("cls", None) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) - _request = build_job_runs_get_request( + content_type = content_type or "application/json" + _content = None + if isinstance(connection, (IOBase, bytes)): + _content = connection + else: + _content = json.dumps(connection, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_connections_create_or_update_request( resource_group_name=resource_group_name, storage_mover_name=storage_mover_name, - project_name=project_name, - job_definition_name=job_definition_name, - job_run_name=job_run_name, + connection_name=connection_name, subscription_id=self._config.subscription_id, + content_type=content_type, api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) @@ -4712,6 +5065,7 @@ def get( } _request.url = self._client.format_url(_request.url, **path_format_arguments) + _decompress = kwargs.pop("decompress", True) _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs @@ -4719,20 +5073,23 @@ def get( response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 201]: if _stream: try: response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass 
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if _stream: - deserialized = response.iter_bytes() + deserialized = response.iter_bytes() if _decompress else response.iter_raw() else: - deserialized = _deserialize(_models.JobRun, response.json()) + deserialized = _deserialize(_models.Connection, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -4740,34 +5097,36 @@ def get( return deserialized # type: ignore @distributed_trace - def list( - self, - resource_group_name: str, - storage_mover_name: str, - project_name: str, - job_definition_name: str, - **kwargs: Any - ) -> ItemPaged["_models.JobRun"]: - """Lists all Job Runs in a Job Definition. + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "storage_mover_name", + "connection_name", + "accept", + ] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + def get( + self, resource_group_name: str, storage_mover_name: str, connection_name: str, **kwargs: Any + ) -> _models.Connection: + """Gets a Connection resource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param storage_mover_name: The name of the Storage Mover resource. Required. :type storage_mover_name: str - :param project_name: The name of the Project resource. Required. - :type project_name: str - :param job_definition_name: The name of the Job Definition resource. Required. 
- :type job_definition_name: str - :return: An iterator like instance of JobRun - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagemover.models.JobRun] + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.Connection :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.JobRun]] = kwargs.pop("cls", None) - error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -4776,14 +5135,460 @@ def list( } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - _request = build_job_runs_list_request( - resource_group_name=resource_group_name, - storage_mover_name=storage_mover_name, - project_name=project_name, - job_definition_name=job_definition_name, + cls: ClsType[_models.Connection] = kwargs.pop("cls", None) + + _request = build_connections_get_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.Connection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": ["api_version", "subscription_id", "resource_group_name", "storage_mover_name", "accept"] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + def list(self, resource_group_name: str, storage_mover_name: str, **kwargs: Any) -> ItemPaged["_models.Connection"]: + """Lists all Connections in a Storage Mover. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. 
+ :type storage_mover_name: str + :return: An iterator like instance of Connection + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagemover.models.Connection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Connection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_connections_list_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize( + List[_models.Connection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "storage_mover_name", + "connection_name", + ] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + def _delete_initial( + self, resource_group_name: str, storage_mover_name: str, connection_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_connections_delete_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-08-01", + params_added_on={ + "2025-08-01": [ + "api_version", + "subscription_id", + "resource_group_name", + "storage_mover_name", + "connection_name", + ] + }, + api_versions_list=["2025-08-01", "2025-12-01"], + ) + def begin_delete( + self, resource_group_name: str, storage_mover_name: str, connection_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Deletes a Connection resource. Returns 409 if there are active jobs using this connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param connection_name: The name of the Connection resource. Required. + :type connection_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + connection_name=connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + 
+class JobRunsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.storagemover.StorageMoverMgmtClient`'s + :attr:`job_runs` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: StorageMoverMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, + resource_group_name: str, + storage_mover_name: str, + project_name: str, + job_definition_name: str, + job_run_name: str, + **kwargs: Any + ) -> _models.JobRun: + """Gets a Job Run resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param project_name: The name of the Project resource. Required. + :type project_name: str + :param job_definition_name: The name of the Job Definition resource. Required. + :type job_definition_name: str + :param job_run_name: The name of the Job Run resource. Required. + :type job_run_name: str + :return: JobRun. 
The JobRun is compatible with MutableMapping + :rtype: ~azure.mgmt.storagemover.models.JobRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.JobRun] = kwargs.pop("cls", None) + + _request = build_job_runs_get_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + project_name=project_name, + job_definition_name=job_definition_name, + job_run_name=job_run_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.JobRun, response.json()) + + if cls: + return 
cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + storage_mover_name: str, + project_name: str, + job_definition_name: str, + **kwargs: Any + ) -> ItemPaged["_models.JobRun"]: + """Lists all Job Runs in a Job Definition. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param storage_mover_name: The name of the Storage Mover resource. Required. + :type storage_mover_name: str + :param project_name: The name of the Project resource. Required. + :type project_name: str + :param job_definition_name: The name of the Job Definition resource. Required. + :type job_definition_name: str + :return: An iterator like instance of JobRun + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagemover.models.JobRun] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.JobRun]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_job_runs_list_request( + resource_group_name=resource_group_name, + storage_mover_name=storage_mover_name, + project_name=project_name, + job_definition_name=job_definition_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -4820,7 +5625,10 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.JobRun], deserialized.get("value", [])) + list_of_elem = _deserialize( + 
List[_models.JobRun], + deserialized.get("value", []), + ) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -4836,7 +5644,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_maximum_set.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_maximum_set.py index 29c5ffd9cc1e..25e3c685a3d3 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_maximum_set.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_maximum_set.py @@ -56,6 +56,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Agents_CreateOrUpdate_MaximumSet.json +# x-ms-original-file: 2025-12-01/Agents_CreateOrUpdate_MaximumSet.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_minimum_set.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_minimum_set.py index 792fd6e31fb4..354869135739 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_minimum_set.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_minimum_set.py @@ -45,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Agents_CreateOrUpdate_MinimumSet.json +# x-ms-original-file: 2025-12-01/Agents_CreateOrUpdate_MinimumSet.json if __name__ == "__main__": main() diff --git 
a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_upload_limit_schedule_overnight.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_upload_limit_schedule_overnight.py index dffb4e8d1cfc..b589c8016268 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_upload_limit_schedule_overnight.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_create_or_update_upload_limit_schedule_overnight.py @@ -61,6 +61,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Agents_CreateOrUpdate_UploadLimitSchedule_Overnight.json +# x-ms-original-file: 2025-12-01/Agents_CreateOrUpdate_UploadLimitSchedule_Overnight.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_delete.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_delete.py index 630f31312656..e2ef9e98fcc9 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_delete.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: 2025-07-01/Agents_Delete.json +# x-ms-original-file: 2025-12-01/Agents_Delete.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_get_maximum_set.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_get_maximum_set.py index 7317836d7b38..93c6289f8d4b 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_get_maximum_set.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_get_maximum_set.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Agents_Get_MaximumSet.json +# x-ms-original-file: 2025-12-01/Agents_Get_MaximumSet.json if __name__ == "__main__": main() diff --git 
a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_get_minimum_set.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_get_minimum_set.py index f3f2dccc03e9..6511c151a7eb 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_get_minimum_set.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_get_minimum_set.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Agents_Get_MinimumSet.json +# x-ms-original-file: 2025-12-01/Agents_Get_MinimumSet.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_list_maximum_set.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_list_maximum_set.py index 4d7187ebe357..171ccbda3409 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_list_maximum_set.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_list_maximum_set.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/Agents_List_MaximumSet.json +# x-ms-original-file: 2025-12-01/Agents_List_MaximumSet.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_list_minimum_set.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_list_minimum_set.py index a3c2fefe57e5..fa4e0296b7d6 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_list_minimum_set.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_list_minimum_set.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/Agents_List_MinimumSet.json +# x-ms-original-file: 2025-12-01/Agents_List_MinimumSet.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_update.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_update.py index 
7525e0bea822..8d35ec720d15 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_update.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/agents_update.py @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Agents_Update.json +# x-ms-original-file: 2025-12-01/Agents_Update.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_create_or_update.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_create_or_update.py new file mode 100644 index 000000000000..c34d1c17c8ee --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_create_or_update.py @@ -0,0 +1,50 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python connections_create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.connections.create_or_update( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + connection_name="example-connection", + connection={ + "properties": { + "description": "Example Connection Description", + "privateLinkServiceId": "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.Network/privateLinkServices/example-pls", + } + }, + ) + print(response) + + +# x-ms-original-file: 2025-12-01/Connections_CreateOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_delete.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_delete.py new file mode 100644 index 000000000000..3f1ea6861d6b --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_delete.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python connections_delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.connections.begin_delete( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + connection_name="example-connection", + ).result() + + +# x-ms-original-file: 2025-12-01/Connections_Delete.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_get.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_get.py new file mode 100644 index 000000000000..af23fb4c43fd --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_get.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python connections_get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.connections.get( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + connection_name="example-connection", + ) + print(response) + + +# x-ms-original-file: 2025-12-01/Connections_Get.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_list.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_list.py new file mode 100644 index 000000000000..ac13008d86a8 --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/connections_list.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python connections_list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.connections.list( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2025-12-01/Connections_List.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_multi_cloud_connector.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_multi_cloud_connector.py index cd49174f0539..cbdec2cb2e43 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_multi_cloud_connector.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_multi_cloud_connector.py @@ -39,6 +39,7 @@ def main(): "properties": { "awsS3BucketId": "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.AwsConnector/s3Buckets/testBucket", "description": "Example multi cloud connector resource id", + "endpointKind": "Source", "endpointType": "AzureMultiCloudConnector", "multiCloudConnectorId": 
"/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.HybridConnectivity/publicCloudConnectors/TestConnector", } @@ -47,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_CreateOrUpdate_AzureMultiCloudConnector.json +# x-ms-original-file: 2025-12-01/Endpoints_CreateOrUpdate_AzureMultiCloudConnector.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_blob_container.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_blob_container.py index 62c891a70d20..b35722582a7e 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_blob_container.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_blob_container.py @@ -39,6 +39,7 @@ def main(): "properties": { "blobContainerName": "examples-blobcontainer", "description": "Example Storage Blob Container Endpoint Description", + "endpointKind": "Target", "endpointType": "AzureStorageBlobContainer", "storageAccountResourceId": "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.Storage/storageAccounts/examplesa", } @@ -47,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_CreateOrUpdate_AzureStorageBlobContainer.json +# x-ms-original-file: 2025-12-01/Endpoints_CreateOrUpdate_AzureStorageBlobContainer.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_nfs_file_share.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_nfs_file_share.py index df4b23277ea2..337a8705a09a 100644 --- 
a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_nfs_file_share.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_nfs_file_share.py @@ -38,6 +38,7 @@ def main(): endpoint={ "properties": { "description": "Example Storage File Share Endpoint Description", + "endpointKind": "Target", "endpointType": "AzureStorageNfsFileShare", "fileShareName": "examples-fileshare", "storageAccountResourceId": "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.Storage/storageAccounts/examplesa", @@ -47,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_CreateOrUpdate_AzureStorageNfsFileShare.json +# x-ms-original-file: 2025-12-01/Endpoints_CreateOrUpdate_AzureStorageNfsFileShare.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_smb_file_share.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_smb_file_share.py index 5b81707213a8..96f29601b1f6 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_smb_file_share.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_azure_storage_smb_file_share.py @@ -38,6 +38,7 @@ def main(): endpoint={ "properties": { "description": "Example Storage File Share Endpoint Description", + "endpointKind": "Target", "endpointType": "AzureStorageSmbFileShare", "fileShareName": "examples-fileshare", "storageAccountResourceId": "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.Storage/storageAccounts/examplesa", @@ -47,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_CreateOrUpdate_AzureStorageSmbFileShare.json +# x-ms-original-file: 
2025-12-01/Endpoints_CreateOrUpdate_AzureStorageSmbFileShare.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_nfs_mount.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_nfs_mount.py index edaa3eb342d3..c8cefa17baa8 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_nfs_mount.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_nfs_mount.py @@ -37,6 +37,7 @@ def main(): endpoint={ "properties": { "description": "Example NFS Mount Endpoint Description", + "endpointKind": "Source", "endpointType": "NfsMount", "export": "examples-exportName", "host": "0.0.0.0", @@ -46,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_CreateOrUpdate_NfsMount.json +# x-ms-original-file: 2025-12-01/Endpoints_CreateOrUpdate_NfsMount.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_s3_with_hmac.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_s3_with_hmac.py new file mode 100644 index 000000000000..e74bdb9617fc --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_s3_with_hmac.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python endpoints_create_or_update_s3_with_hmac.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.endpoints.create_or_update( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + endpoint_name="examples-endpointName", + endpoint={ + "properties": { + "credentials": { + "accessKeyUri": "https://examples-azureKeyVault.vault.azure.net/secrets/examples-access", + "secretKeyUri": "https://examples-azureKeyVault.vault.azure.net/secrets/examples-secret", + "type": "AzureKeyVaultS3WithHMAC", + }, + "description": "Example S3WithHmac Endpoint Description", + "endpointKind": "Source", + "endpointType": "S3WithHMAC", + "sourceType": "GCS", + "sourceUri": "https://examples-bucket.s3.amazonaws.com/prefix/", + } + }, + ) + print(response) + + +# x-ms-original-file: 2025-12-01/Endpoints_CreateOrUpdate_S3WithHMAC.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_smb_mount.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_smb_mount.py index 842f0578f802..f4261f7ace9d 100644 --- 
a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_smb_mount.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_create_or_update_smb_mount.py @@ -42,6 +42,7 @@ def main(): "usernameUri": "https://examples-azureKeyVault.vault.azure.net/secrets/examples-username", }, "description": "Example SMB Mount Endpoint Description", + "endpointKind": "Source", "endpointType": "SmbMount", "host": "0.0.0.0", "shareName": "examples-shareName", @@ -51,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_CreateOrUpdate_SmbMount.json +# x-ms-original-file: 2025-12-01/Endpoints_CreateOrUpdate_SmbMount.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_delete.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_delete.py index 1848334d0b4e..71a951ef31ea 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_delete.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: 2025-07-01/Endpoints_Delete.json +# x-ms-original-file: 2025-12-01/Endpoints_Delete.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_multi_cloud_connector.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_multi_cloud_connector.py index b3e2596e9c88..bd8b2dea63aa 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_multi_cloud_connector.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_multi_cloud_connector.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Get_AzureMultiCloudConnector.json +# x-ms-original-file: 2025-12-01/Endpoints_Get_AzureMultiCloudConnector.json if __name__ == 
"__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_blob_container.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_blob_container.py index 50d00768a4a7..dec93a23fe74 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_blob_container.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_blob_container.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Get_AzureStorageBlobContainer.json +# x-ms-original-file: 2025-12-01/Endpoints_Get_AzureStorageBlobContainer.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_nfs_file_share.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_nfs_file_share.py index 3428966b3da0..c95b9eb3982b 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_nfs_file_share.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_nfs_file_share.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Get_AzureStorageNfsFileShare.json +# x-ms-original-file: 2025-12-01/Endpoints_Get_AzureStorageNfsFileShare.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_smb_file_share.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_smb_file_share.py index a3c5132f1e18..e7bd0a1c7666 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_smb_file_share.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_azure_storage_smb_file_share.py @@ -38,6 +38,6 @@ def main(): print(response) 
-# x-ms-original-file: 2025-07-01/Endpoints_Get_AzureStorageSmbFileShare.json +# x-ms-original-file: 2025-12-01/Endpoints_Get_AzureStorageSmbFileShare.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_nfs_mount.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_nfs_mount.py index 1c66a51e9e01..fec504f17fa6 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_nfs_mount.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_nfs_mount.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Get_NfsMount.json +# x-ms-original-file: 2025-12-01/Endpoints_Get_NfsMount.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_s3_with_hmac.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_s3_with_hmac.py new file mode 100644 index 000000000000..a30fc085d3dc --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_s3_with_hmac.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python endpoints_get_s3_with_hmac.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.endpoints.get( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + endpoint_name="examples-endpointName", + ) + print(response) + + +# x-ms-original-file: 2025-12-01/Endpoints_Get_S3WithHMAC.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_smb_mount.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_smb_mount.py index f230341bdfbe..15464aefe606 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_smb_mount.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_get_smb_mount.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Get_SmbMount.json +# x-ms-original-file: 2025-12-01/Endpoints_Get_SmbMount.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_list.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_list.py index 5ce490b1a33a..7cd2af04612a 100644 --- 
a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_list.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/Endpoints_List.json +# x-ms-original-file: 2025-12-01/Endpoints_List.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_multi_cloud_connector.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_multi_cloud_connector.py index 4d64ee1bec6f..d6e0b40e7ba8 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_multi_cloud_connector.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_multi_cloud_connector.py @@ -41,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Update_AzureMultiCloudConnector.json +# x-ms-original-file: 2025-12-01/Endpoints_Update_AzureMultiCloudConnector.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_blob_container.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_blob_container.py index 07b134a5099d..0cbb10d77c66 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_blob_container.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_blob_container.py @@ -41,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Update_AzureStorageBlobContainer.json +# x-ms-original-file: 2025-12-01/Endpoints_Update_AzureStorageBlobContainer.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_nfs_file_share.py 
b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_nfs_file_share.py index a0e2ef66d235..694834e53f16 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_nfs_file_share.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_nfs_file_share.py @@ -41,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Update_AzureStorageNfsFileShare.json +# x-ms-original-file: 2025-12-01/Endpoints_Update_AzureStorageNfsFileShare.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_smb_file_share.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_smb_file_share.py index fc9bf13c8609..0ffbd601f887 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_smb_file_share.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_azure_storage_smb_file_share.py @@ -41,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Update_AzureStorageSmbFileShare.json +# x-ms-original-file: 2025-12-01/Endpoints_Update_AzureStorageSmbFileShare.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_nfs_mount.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_nfs_mount.py index 9a0e8a212a6c..7c5327bdb151 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_nfs_mount.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_nfs_mount.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Update_NfsMount.json +# x-ms-original-file: 2025-12-01/Endpoints_Update_NfsMount.json if __name__ == "__main__": main() diff 
--git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_s3_with_hmac.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_s3_with_hmac.py new file mode 100644 index 000000000000..c5b7154a715c --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_s3_with_hmac.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python endpoints_update_s3_with_hmac.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.endpoints.update( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + endpoint_name="examples-endpointName", + endpoint={"properties": {"description": "Updated Endpoint Description", "endpointType": "S3WithHMAC"}}, + ) + print(response) + + +# x-ms-original-file: 2025-12-01/Endpoints_Update_S3WithHMAC.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_smb_mount.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_smb_mount.py index b5a39dad5c25..136ca81ea07f 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_smb_mount.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/endpoints_update_smb_mount.py @@ -49,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Endpoints_Update_SmbMount.json +# x-ms-original-file: 2025-12-01/Endpoints_Update_SmbMount.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update.py index 0ba203ca184e..01769181fcaf 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,9 @@ def main(): job_definition={ "properties": { "agentName": "migration-agent", + "connections": [ + "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.StorageMover/storageMovers/examples-storageMoverName/connections/example-connection" + ], "copyMode": "Additive", "description": "Example Job Definition Description", "jobType": "OnPremToCloud", @@ -51,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/JobDefinitions_CreateOrUpdate.json +# x-ms-original-file: 2025-12-01/JobDefinitions_CreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update_cloud_to_cloud.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update_cloud_to_cloud.py index 6d35cd48ad0a..50f9296b7208 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update_cloud_to_cloud.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update_cloud_to_cloud.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,9 @@ def main(): job_definition={ "properties": { "agentName": "dummy-agent", + "connections": [ + "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.StorageMover/storageMovers/examples-storageMoverName/connections/example-connection" + ], "copyMode": "Additive", "description": "Example Job Definition Description", "jobType": "CloudToCloud", @@ -51,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/JobDefinitions_CreateOrUpdate_CloudToCloud.json +# x-ms-original-file: 2025-12-01/JobDefinitions_CreateOrUpdate_CloudToCloud.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update_with_schedule.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update_with_schedule.py new file mode 100644 index 000000000000..10d5017e8d93 --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_create_or_update_with_schedule.py @@ -0,0 +1,68 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python job_definitions_create_or_update_with_schedule.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.job_definitions.create_or_update( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + project_name="examples-projectName", + job_definition_name="examples-jobDefinitionName", + job_definition={ + "properties": { + "agentName": "dummy-agent", + "connections": [ + "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.StorageMover/storageMovers/examples-storageMoverName/connections/example-connection" + ], + "copyMode": "Additive", + "description": "Example Job Definition Description", + "jobType": "CloudToCloud", + "schedule": { + "daysOfWeek": ["Monday", "Wednesday", "Friday"], + "endDate": "2025-12-31T12:00:00Z", + "executionTime": {"hour": 9, "minute": 0}, + "frequency": "Weekly", + "isActive": True, + "startDate": "2025-12-01T00:00:00Z", + }, + "sourceName": "examples-sourceEndpointName", + "sourceSubpath": "/", + "targetName": "examples-targetEndpointName", + "targetSubpath": "/", + } + }, + ) + print(response) + + +# x-ms-original-file: 2025-12-01/JobDefinitions_CreateOrUpdate_With_Schedule.json +if __name__ == "__main__": + 
main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_delete.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_delete.py index 62b4b597ffdc..05216cfced65 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_delete.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_delete.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: 2025-07-01/JobDefinitions_Delete.json +# x-ms-original-file: 2025-12-01/JobDefinitions_Delete.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_get.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_get.py index b26474bb882b..9b625be2be03 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_get.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_get.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/JobDefinitions_Get.json +# x-ms-original-file: 2025-12-01/JobDefinitions_Get.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_get_with_schedule.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_get_with_schedule.py new file mode 100644 index 000000000000..7732580b52d6 --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_get_with_schedule.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python job_definitions_get_with_schedule.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.job_definitions.get( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + project_name="examples-projectName", + job_definition_name="examples-jobDefinitionName", + ) + print(response) + + +# x-ms-original-file: 2025-12-01/JobDefinitions_Get_With_Schedule.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_list.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_list.py index 7116d291c02b..726c5517d702 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_list.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_list.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/JobDefinitions_List.json +# x-ms-original-file: 2025-12-01/JobDefinitions_List.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_start_job.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_start_job.py index 
42d082f60dac..d1a8b72903ab 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_start_job.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_start_job.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/JobDefinitions_StartJob.json +# x-ms-original-file: 2025-12-01/JobDefinitions_StartJob.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_stop_job.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_stop_job.py index 0c8a7390a63e..4967fa14b70e 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_stop_job.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_stop_job.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/JobDefinitions_StopJob.json +# x-ms-original-file: 2025-12-01/JobDefinitions_StopJob.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_update.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_update.py index f7d2eba043d0..5191da7747d7 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_update.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_definitions_update.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -36,12 +37,18 @@ def main(): project_name="examples-projectName", job_definition_name="examples-jobDefinitionName", job_definition={ - "properties": {"agentName": "updatedAgentName", "description": "Updated Job Definition Description"} + "properties": { + "agentName": "updatedAgentName", + "connections": [ + "/subscriptions/60bcfc77-6589-4da2-b7fd-f9ec9322cf95/resourceGroups/examples-rg/providers/Microsoft.StorageMover/storageMovers/examples-storageMoverName/connections/example-connection" + ], + "description": "Updated Job Definition Description", + } }, ) print(response) -# x-ms-original-file: 2025-07-01/JobDefinitions_Update.json +# x-ms-original-file: 2025-12-01/JobDefinitions_Update.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_get.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_get.py index ac4f8c29ab26..3ec106223433 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_get.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_get.py @@ -40,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/JobRuns_Get.json +# x-ms-original-file: 2025-12-01/JobRuns_Get.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_get_with_schedule.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_get_with_schedule.py new file mode 100644 index 000000000000..62d50a22c2a0 --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_get_with_schedule.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.storagemover import StorageMoverMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-storagemover +# USAGE + python job_runs_get_with_schedule.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = StorageMoverMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.job_runs.get( + resource_group_name="examples-rg", + storage_mover_name="examples-storageMoverName", + project_name="examples-projectName", + job_definition_name="examples-jobDefinitionName", + job_run_name="examples-jobRunName", + ) + print(response) + + +# x-ms-original-file: 2025-12-01/JobRuns_Get_With_Schedule.json +if __name__ == "__main__": + main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_list.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_list.py index ea18a4927667..19fd3388be67 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_list.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/job_runs_list.py @@ -40,6 +40,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/JobRuns_List.json +# x-ms-original-file: 2025-12-01/JobRuns_List.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/operations_list.py 
b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/operations_list.py index e714f99d66c2..50b820c9d2f3 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/operations_list.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/operations_list.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/Operations_List.json +# x-ms-original-file: 2025-12-01/Operations_List.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_create_or_update.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_create_or_update.py index 0000f57d59a0..c058b134b1bd 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_create_or_update.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_create_or_update.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Projects_CreateOrUpdate.json +# x-ms-original-file: 2025-12-01/Projects_CreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_delete.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_delete.py index 41b8d5de4439..f946b745e395 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_delete.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: 2025-07-01/Projects_Delete.json +# x-ms-original-file: 2025-12-01/Projects_Delete.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_get.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_get.py index ca4532a59604..9cd864a216d9 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_get.py +++ 
b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Projects_Get.json +# x-ms-original-file: 2025-12-01/Projects_Get.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_list.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_list.py index b77231c6d103..7ba1922c0b64 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_list.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/Projects_List.json +# x-ms-original-file: 2025-12-01/Projects_List.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_update.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_update.py index e6ee7c3e189c..c81b165ceb1e 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_update.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/projects_update.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/Projects_Update.json +# x-ms-original-file: 2025-12-01/Projects_Update.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_create_or_update.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_create_or_update.py index ba72d6bc5f59..6186322c9cf7 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_create_or_update.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_create_or_update.py @@ -42,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/StorageMovers_CreateOrUpdate.json +# x-ms-original-file: 
2025-12-01/StorageMovers_CreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_delete.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_delete.py index 6c3052cdc390..d31419607b02 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_delete.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: 2025-07-01/StorageMovers_Delete.json +# x-ms-original-file: 2025-12-01/StorageMovers_Delete.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_get.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_get.py index 091ef986a30c..74475f64f008 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_get.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/StorageMovers_Get.json +# x-ms-original-file: 2025-12-01/StorageMovers_Get.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_list.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_list.py index 62fb215bdc2e..515f1db1aadd 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_list.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/StorageMovers_List.json +# x-ms-original-file: 2025-12-01/StorageMovers_List.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_list_by_subscription.py 
b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_list_by_subscription.py index 23e95b5e5b2b..b5bcbd15e88a 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_list_by_subscription.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_list_by_subscription.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: 2025-07-01/StorageMovers_ListBySubscription.json +# x-ms-original-file: 2025-12-01/StorageMovers_ListBySubscription.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_update.py b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_update.py index 58aca58a3ea2..103e84c18cc8 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_update.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_samples/storage_movers_update.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2025-07-01/StorageMovers_Update.json +# x-ms-original-file: 2025-12-01/StorageMovers_Update.json if __name__ == "__main__": main() diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_connections_operations.py b/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_connections_operations.py new file mode 100644 index 000000000000..ee4d67eb0124 --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_connections_operations.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.storagemover import StorageMoverMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStorageMoverMgmtConnectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StorageMoverMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connections_create_or_update(self, resource_group): + response = self.client.connections.create_or_update( + resource_group_name=resource_group.name, + storage_mover_name="str", + connection_name="str", + connection={ + "properties": { + "privateLinkServiceId": "str", + "connectionStatus": "str", + "description": "str", + "jobList": ["str"], + "privateEndpointName": "str", + "privateEndpointResourceId": "str", + "provisioningState": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connections_get(self, resource_group): + response = self.client.connections.get( + resource_group_name=resource_group.name, + storage_mover_name="str", + connection_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connections_list(self, resource_group): + response = self.client.connections.list( + resource_group_name=resource_group.name, + storage_mover_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_connections_begin_delete(self, resource_group): + response = self.client.connections.begin_delete( + resource_group_name=resource_group.name, + storage_mover_name="str", + connection_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_connections_operations_async.py b/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_connections_operations_async.py new file mode 100644 index 000000000000..fdf03f664154 --- /dev/null +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_connections_operations_async.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.storagemover.aio import StorageMoverMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStorageMoverMgmtConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StorageMoverMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connections_create_or_update(self, resource_group): + response = await self.client.connections.create_or_update( + resource_group_name=resource_group.name, + storage_mover_name="str", + connection_name="str", + connection={ + "properties": { + "privateLinkServiceId": "str", + "connectionStatus": "str", + "description": "str", + "jobList": ["str"], + "privateEndpointName": "str", + "privateEndpointResourceId": "str", + "provisioningState": "str", + }, + "id": "str", + "name": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connections_get(self, resource_group): + response = await self.client.connections.get( + resource_group_name=resource_group.name, + storage_mover_name="str", + connection_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connections_list(self, resource_group): + response = self.client.connections.list( + resource_group_name=resource_group.name, + storage_mover_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_connections_begin_delete(self, resource_group): + response = await ( + await self.client.connections.begin_delete( + resource_group_name=resource_group.name, + storage_mover_name="str", + connection_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_job_definitions_operations.py b/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_job_definitions_operations.py index 055693253090..d13fd3e6128b 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_job_definitions_operations.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_job_definitions_operations.py @@ -46,12 +46,25 @@ def test_job_definitions_create_or_update(self, resource_group): "targetName": "str", "agentName": "str", "agentResourceId": "str", + "connections": ["str"], + "dataIntegrityValidation": "str", "description": "str", "jobType": "str", "latestJobRunName": "str", "latestJobRunResourceId": "str", "latestJobRunStatus": "str", + "preservePermissions": bool, "provisioningState": "str", + "schedule": { + "frequency": "str", + "isActive": bool, + "cronExpression": "str", + "daysOfMonth": [0], + "daysOfWeek": ["str"], + "endDate": "2020-02-20 00:00:00", + "executionTime": {"hour": 0, "minute": 0}, + "startDate": "2020-02-20 00:00:00", + }, 
"sourceResourceId": "str", "sourceSubpath": "str", "sourceTargetMap": { @@ -103,7 +116,15 @@ def test_job_definitions_update(self, resource_group): storage_mover_name="str", project_name="str", job_definition_name="str", - job_definition={"properties": {"agentName": "str", "copyMode": "str", "description": "str"}}, + job_definition={ + "properties": { + "agentName": "str", + "connections": ["str"], + "copyMode": "str", + "dataIntegrityValidation": "str", + "description": "str", + } + }, ) # please add some check logic here by yourself diff --git a/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_job_definitions_operations_async.py b/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_job_definitions_operations_async.py index 9ee03f2da235..ffa118f9a373 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_job_definitions_operations_async.py +++ b/sdk/storagemover/azure-mgmt-storagemover/generated_tests/test_storage_mover_mgmt_job_definitions_operations_async.py @@ -47,12 +47,25 @@ async def test_job_definitions_create_or_update(self, resource_group): "targetName": "str", "agentName": "str", "agentResourceId": "str", + "connections": ["str"], + "dataIntegrityValidation": "str", "description": "str", "jobType": "str", "latestJobRunName": "str", "latestJobRunResourceId": "str", "latestJobRunStatus": "str", + "preservePermissions": bool, "provisioningState": "str", + "schedule": { + "frequency": "str", + "isActive": bool, + "cronExpression": "str", + "daysOfMonth": [0], + "daysOfWeek": ["str"], + "endDate": "2020-02-20 00:00:00", + "executionTime": {"hour": 0, "minute": 0}, + "startDate": "2020-02-20 00:00:00", + }, "sourceResourceId": "str", "sourceSubpath": "str", "sourceTargetMap": { @@ -104,7 +117,15 @@ async def test_job_definitions_update(self, resource_group): storage_mover_name="str", project_name="str", job_definition_name="str", - job_definition={"properties": 
{"agentName": "str", "copyMode": "str", "description": "str"}}, + job_definition={ + "properties": { + "agentName": "str", + "connections": ["str"], + "copyMode": "str", + "dataIntegrityValidation": "str", + "description": "str", + } + }, ) # please add some check logic here by yourself diff --git a/sdk/storagemover/azure-mgmt-storagemover/pyproject.toml b/sdk/storagemover/azure-mgmt-storagemover/pyproject.toml index f598b535c149..b39e88739b3a 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/pyproject.toml +++ b/sdk/storagemover/azure-mgmt-storagemover/pyproject.toml @@ -1,3 +1,72 @@ +[build-system] +requires = [ + "setuptools>=77.0.3", + "wheel", +] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-mgmt-storagemover" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Azure Storagemover Management Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = [ + "azure", + "azure sdk", +] +dependencies = [ + "isodate>=0.6.1", + "azure-mgmt-core>=1.6.0", + "typing-extensions>=4.6.0", +] +dynamic = [ + "version", + "readme", +] + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic.version] +attr = "azure.mgmt.storagemover._version.VERSION" + +[tool.setuptools.dynamic.readme] +file = [ + "README.md", + "CHANGELOG.md", +] +content-type = "text/markdown" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + 
"azure.mgmt", +] + +[tool.setuptools.package-data] +pytyped = [ + "py.typed", +] + [tool.azure-sdk-build] breaking = false mypy = false @@ -16,3 +85,4 @@ need_msrestazure = false need_azuremgmtcore = true sample_link = "" title = "StorageMoverMgmtClient" +exclude_folders = "" diff --git a/sdk/storagemover/azure-mgmt-storagemover/setup.py b/sdk/storagemover/azure-mgmt-storagemover/setup.py deleted file mode 100644 index 9b04ee31f15c..000000000000 --- a/sdk/storagemover/azure-mgmt-storagemover/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import re -import os.path -from io import open -from setuptools import find_packages, setup - -# Change the PACKAGE_NAME only to change folder and different name -PACKAGE_NAME = "azure-mgmt-storagemover" -PACKAGE_PPRINT_NAME = "Storagemover Management" - -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") -# a-b-c => a.b.c -namespace_name = PACKAGE_NAME.replace("-", ".") - -# Version extraction inspired from 'requests' -with open( - os.path.join(package_folder_path, "version.py") - if os.path.exists(os.path.join(package_folder_path, "version.py")) - else os.path.join(package_folder_path, "_version.py"), - "r", -) as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) - -if not version: - raise RuntimeError("Cannot find version information") - -with open("README.md", encoding="utf-8") as f: - readme = f.read() -with open("CHANGELOG.md", encoding="utf-8") as f: - changelog = f.read() - -setup( - name=PACKAGE_NAME, - version=version, - description="Microsoft Azure {} Client Library for Python".format(PACKAGE_PPRINT_NAME), - 
long_description=readme + "\n\n" + changelog, - long_description_content_type="text/markdown", - license="MIT License", - author="Microsoft Corporation", - author_email="azpysdkhelp@microsoft.com", - url="https://github.com/Azure/azure-sdk-for-python", - keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Programming Language :: Python", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "License :: OSI Approved :: MIT License", - ], - zip_safe=False, - packages=find_packages( - exclude=[ - "tests", - # Exclude packages that will be covered by PEP420 or nspkg - "azure", - "azure.mgmt", - ] - ), - include_package_data=True, - package_data={ - "pytyped": ["py.typed"], - }, - install_requires=[ - "isodate>=0.6.1", - "typing-extensions>=4.6.0", - "azure-common>=1.1", - "azure-mgmt-core>=1.6.0", - ], - python_requires=">=3.9", -) diff --git a/sdk/storagemover/azure-mgmt-storagemover/tsp-location.yaml b/sdk/storagemover/azure-mgmt-storagemover/tsp-location.yaml index 98a0f9f3ac99..ca73b7e1198c 100644 --- a/sdk/storagemover/azure-mgmt-storagemover/tsp-location.yaml +++ b/sdk/storagemover/azure-mgmt-storagemover/tsp-location.yaml @@ -1,4 +1,4 @@ directory: specification/storagemover/StorageMover.Management -commit: af759847e0abab741437d695782ad62d7b2cce14 +commit: b8861b82d3b47a8cf9c2dfae4a29ec9ff41eacbf repo: Azure/azure-rest-api-specs additionalDirectories: