diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md b/sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md index 9d1e2c8c0233..3a67e6633981 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md +++ b/sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md @@ -1,5 +1,9 @@ # Release History +## 0.0.0 (2025-12-21) + +change log generation failed!!! You need to write it manually!!! + ## 13.0.0b7 (2024-11-05) ### Other Changes diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/MANIFEST.in b/sdk/loganalytics/azure-mgmt-loganalytics/MANIFEST.in index 4fa315fddc19..999b2a456e3d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/MANIFEST.in +++ b/sdk/loganalytics/azure-mgmt-loganalytics/MANIFEST.in @@ -1,8 +1,7 @@ -include _meta.json -recursive-include tests *.py *.json -recursive-include samples *.py *.md include *.md -include azure/__init__.py -include azure/mgmt/__init__.py include LICENSE include azure/mgmt/loganalytics/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/mgmt/__init__.py diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json b/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json deleted file mode 100644 index 5edd9181339e..000000000000 --- a/sdk/loganalytics/azure-mgmt-loganalytics/_meta.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "commit": "690a7656d65a03b134fa44fc7bb8013dc18a15b5", - "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest": "3.10.2", - "use": [ - "@autorest/python@6.19.0", - "@autorest/modelerfour@4.27.0" - ], - "autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/azure-sdk-for-python/sdk --tag=package-2022-10 --use=@autorest/python@6.19.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", - "readme": 
"specification/operationalinsights/resource-manager/readme.md" -} \ No newline at end of file diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/_metadata.json b/sdk/loganalytics/azure-mgmt-loganalytics/_metadata.json new file mode 100644 index 000000000000..6334041cb9e5 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/_metadata.json @@ -0,0 +1,11 @@ +{ + "commit": "788c11df9da57f04d4f9e3a72c05fea0e0bd4c6e", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest": "3.10.2", + "use": [ + "@autorest/python@6.42.0", + "@autorest/modelerfour@4.27.0" + ], + "autorest_command": "autorest specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --package-mode=azure-mgmt --python --python-sdks-folder=/mnt/vss/_work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.42.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", + "readme": "specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/readme.md" +} \ No newline at end of file diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/apiview-properties.json b/sdk/loganalytics/azure-mgmt-loganalytics/apiview-properties.json new file mode 100644 index 000000000000..2fc9cacdbaf0 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/apiview-properties.json @@ -0,0 +1,315 @@ +{ + "CrossLanguagePackageId": null, + "CrossLanguageDefinitionId": { + "azure.mgmt.loganalytics.models.AccessRule": null, + "azure.mgmt.loganalytics.models.AccessRuleProperties": null, + "azure.mgmt.loganalytics.models.AccessRulePropertiesSubscriptionsItem": null, + "azure.mgmt.loganalytics.models.AssociatedWorkspace": null, + "azure.mgmt.loganalytics.models.AvailableServiceTier": null, + "azure.mgmt.loganalytics.models.Resource": null, + "azure.mgmt.loganalytics.models.AzureEntityResource": null, + 
"azure.mgmt.loganalytics.models.CapacityReservationProperties": null, + "azure.mgmt.loganalytics.models.TrackedResource": null, + "azure.mgmt.loganalytics.models.Cluster": null, + "azure.mgmt.loganalytics.models.ClusterListResult": null, + "azure.mgmt.loganalytics.models.ClusterPatch": null, + "azure.mgmt.loganalytics.models.ClusterReplicationProperties": null, + "azure.mgmt.loganalytics.models.ClusterSku": null, + "azure.mgmt.loganalytics.models.Column": null, + "azure.mgmt.loganalytics.models.CoreSummary": null, + "azure.mgmt.loganalytics.models.ProxyResource": null, + "azure.mgmt.loganalytics.models.DataExport": null, + "azure.mgmt.loganalytics.models.DataExportListResult": null, + "azure.mgmt.loganalytics.models.DataSource": null, + "azure.mgmt.loganalytics.models.DataSourceFilter": null, + "azure.mgmt.loganalytics.models.DataSourceListResult": null, + "azure.mgmt.loganalytics.models.ErrorAdditionalInfo": null, + "azure.mgmt.loganalytics.models.ErrorDetail": null, + "azure.mgmt.loganalytics.models.ErrorDetailAutoGenerated": null, + "azure.mgmt.loganalytics.models.ErrorDetailAutoGenerated2": null, + "azure.mgmt.loganalytics.models.ErrorResponse": null, + "azure.mgmt.loganalytics.models.ErrorResponseAutoGenerated": null, + "azure.mgmt.loganalytics.models.ErrorResponseAutoGenerated2": null, + "azure.mgmt.loganalytics.models.Identity": null, + "azure.mgmt.loganalytics.models.IntelligencePack": null, + "azure.mgmt.loganalytics.models.KeyVaultProperties": null, + "azure.mgmt.loganalytics.models.LinkedService": null, + "azure.mgmt.loganalytics.models.LinkedServiceListResult": null, + "azure.mgmt.loganalytics.models.LinkedStorageAccountsListResult": null, + "azure.mgmt.loganalytics.models.LinkedStorageAccountsResource": null, + "azure.mgmt.loganalytics.models.ResourceAutoGenerated": null, + "azure.mgmt.loganalytics.models.TrackedResourceAutoGenerated": null, + "azure.mgmt.loganalytics.models.LogAnalyticsQueryPack": null, + 
"azure.mgmt.loganalytics.models.LogAnalyticsQueryPackListResult": null, + "azure.mgmt.loganalytics.models.ProxyResourceAutoGenerated": null, + "azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery": null, + "azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQueryListResult": null, + "azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQueryPropertiesRelated": null, + "azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuerySearchProperties": null, + "azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuerySearchPropertiesRelated": null, + "azure.mgmt.loganalytics.models.ManagedServiceIdentity": null, + "azure.mgmt.loganalytics.models.ManagementGroup": null, + "azure.mgmt.loganalytics.models.MetricName": null, + "azure.mgmt.loganalytics.models.NetworkSecurityPerimeter": null, + "azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfiguration": null, + "azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfigurationListResult": null, + "azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfigurationProperties": null, + "azure.mgmt.loganalytics.models.NetworkSecurityProfile": null, + "azure.mgmt.loganalytics.models.Operation": null, + "azure.mgmt.loganalytics.models.OperationDisplay": null, + "azure.mgmt.loganalytics.models.OperationListResult": null, + "azure.mgmt.loganalytics.models.OperationStatus": null, + "azure.mgmt.loganalytics.models.PrivateLinkScopedResource": null, + "azure.mgmt.loganalytics.models.ProvisioningIssue": null, + "azure.mgmt.loganalytics.models.ProvisioningIssueProperties": null, + "azure.mgmt.loganalytics.models.ResourceAssociation": null, + "azure.mgmt.loganalytics.models.RestoredLogs": null, + "azure.mgmt.loganalytics.models.ResultStatistics": null, + "azure.mgmt.loganalytics.models.RuleDefinition": null, + "azure.mgmt.loganalytics.models.SavedSearch": null, + "azure.mgmt.loganalytics.models.SavedSearchesListResult": null, + "azure.mgmt.loganalytics.models.Schema": null, + 
"azure.mgmt.loganalytics.models.SearchGetSchemaResponse": null, + "azure.mgmt.loganalytics.models.SearchMetadata": null, + "azure.mgmt.loganalytics.models.SearchMetadataSchema": null, + "azure.mgmt.loganalytics.models.SearchResults": null, + "azure.mgmt.loganalytics.models.SearchSchemaValue": null, + "azure.mgmt.loganalytics.models.SearchSort": null, + "azure.mgmt.loganalytics.models.SharedKeys": null, + "azure.mgmt.loganalytics.models.StorageAccount": null, + "azure.mgmt.loganalytics.models.StorageInsight": null, + "azure.mgmt.loganalytics.models.StorageInsightListResult": null, + "azure.mgmt.loganalytics.models.StorageInsightStatus": null, + "azure.mgmt.loganalytics.models.SummaryLogs": null, + "azure.mgmt.loganalytics.models.SummaryLogsListResult": null, + "azure.mgmt.loganalytics.models.SummaryLogsRetryBin": null, + "azure.mgmt.loganalytics.models.SummaryLogsRetryBinProperties": null, + "azure.mgmt.loganalytics.models.SystemData": null, + "azure.mgmt.loganalytics.models.Table": null, + "azure.mgmt.loganalytics.models.TablesListResult": null, + "azure.mgmt.loganalytics.models.Tag": null, + "azure.mgmt.loganalytics.models.TagsResource": null, + "azure.mgmt.loganalytics.models.UsageMetric": null, + "azure.mgmt.loganalytics.models.UserAssignedIdentity": null, + "azure.mgmt.loganalytics.models.UserIdentityProperties": null, + "azure.mgmt.loganalytics.models.Workspace": null, + "azure.mgmt.loganalytics.models.WorkspaceCapping": null, + "azure.mgmt.loganalytics.models.WorkspaceFailoverProperties": null, + "azure.mgmt.loganalytics.models.WorkspaceFeatures": null, + "azure.mgmt.loganalytics.models.WorkspaceListManagementGroupsResult": null, + "azure.mgmt.loganalytics.models.WorkspaceListResult": null, + "azure.mgmt.loganalytics.models.WorkspaceListUsagesResult": null, + "azure.mgmt.loganalytics.models.WorkspacePatch": null, + "azure.mgmt.loganalytics.models.WorkspacePurgeBody": null, + "azure.mgmt.loganalytics.models.WorkspacePurgeBodyFilters": null, + 
"azure.mgmt.loganalytics.models.WorkspacePurgeResponse": null, + "azure.mgmt.loganalytics.models.WorkspacePurgeStatusResponse": null, + "azure.mgmt.loganalytics.models.WorkspaceReplicationPatProperties": null, + "azure.mgmt.loganalytics.models.WorkspaceReplicationProperties": null, + "azure.mgmt.loganalytics.models.WorkspaceSku": null, + "azure.mgmt.loganalytics.models.SkuNameEnum": null, + "azure.mgmt.loganalytics.models.ManagedServiceIdentityType": null, + "azure.mgmt.loganalytics.models.ClusterSkuNameEnum": null, + "azure.mgmt.loganalytics.models.ClusterEntityStatus": null, + "azure.mgmt.loganalytics.models.BillingType": null, + "azure.mgmt.loganalytics.models.ClusterReplicationState": null, + "azure.mgmt.loganalytics.models.Type": null, + "azure.mgmt.loganalytics.models.DataSourceKind": null, + "azure.mgmt.loganalytics.models.LinkedServiceEntityStatus": null, + "azure.mgmt.loganalytics.models.DataSourceType": null, + "azure.mgmt.loganalytics.models.CreatedByType": null, + "azure.mgmt.loganalytics.models.SearchSortEnum": null, + "azure.mgmt.loganalytics.models.StorageInsightState": null, + "azure.mgmt.loganalytics.models.ColumnTypeEnum": null, + "azure.mgmt.loganalytics.models.ColumnDataTypeHintEnum": null, + "azure.mgmt.loganalytics.models.TablePlanEnum": null, + "azure.mgmt.loganalytics.models.SourceEnum": null, + "azure.mgmt.loganalytics.models.TableTypeEnum": null, + "azure.mgmt.loganalytics.models.TableSubTypeEnum": null, + "azure.mgmt.loganalytics.models.ProvisioningStateEnum": null, + "azure.mgmt.loganalytics.models.IdentityType": null, + "azure.mgmt.loganalytics.models.WorkspaceEntityStatus": null, + "azure.mgmt.loganalytics.models.WorkspaceSkuNameEnum": null, + "azure.mgmt.loganalytics.models.DataIngestionStatus": null, + "azure.mgmt.loganalytics.models.PublicNetworkAccessType": null, + "azure.mgmt.loganalytics.models.WorkspaceReplicationState": null, + "azure.mgmt.loganalytics.models.WorkspaceFailoverState": null, + 
"azure.mgmt.loganalytics.models.AccessRuleDirection": null, + "azure.mgmt.loganalytics.models.IssueType": null, + "azure.mgmt.loganalytics.models.Severity": null, + "azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfigurationProvisioningState": null, + "azure.mgmt.loganalytics.models.ResourceAssociationAccessMode": null, + "azure.mgmt.loganalytics.models.RuleTypeEnum": null, + "azure.mgmt.loganalytics.models.StatusCodeEnum": null, + "azure.mgmt.loganalytics.models.TimeSelectorEnum": null, + "azure.mgmt.loganalytics.models.PurgeState": null, + "azure.mgmt.loganalytics.operations.AvailableServiceTiersOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.AvailableServiceTiersOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.ClustersOperations.list_by_resource_group": null, + "azure.mgmt.loganalytics.aio.operations.ClustersOperations.list_by_resource_group": null, + "azure.mgmt.loganalytics.operations.ClustersOperations.list": null, + "azure.mgmt.loganalytics.aio.operations.ClustersOperations.list": null, + "azure.mgmt.loganalytics.operations.ClustersOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.ClustersOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.operations.ClustersOperations.begin_delete": null, + "azure.mgmt.loganalytics.aio.operations.ClustersOperations.begin_delete": null, + "azure.mgmt.loganalytics.operations.ClustersOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.ClustersOperations.get": null, + "azure.mgmt.loganalytics.operations.ClustersOperations.begin_update": null, + "azure.mgmt.loganalytics.aio.operations.ClustersOperations.begin_update": null, + "azure.mgmt.loganalytics.operations.DataExportsOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.DataExportsOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.DataExportsOperations.create_or_update": null, + 
"azure.mgmt.loganalytics.aio.operations.DataExportsOperations.create_or_update": null, + "azure.mgmt.loganalytics.operations.DataExportsOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.DataExportsOperations.get": null, + "azure.mgmt.loganalytics.operations.DataExportsOperations.delete": null, + "azure.mgmt.loganalytics.aio.operations.DataExportsOperations.delete": null, + "azure.mgmt.loganalytics.operations.DataSourcesOperations.create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.DataSourcesOperations.create_or_update": null, + "azure.mgmt.loganalytics.operations.DataSourcesOperations.delete": null, + "azure.mgmt.loganalytics.aio.operations.DataSourcesOperations.delete": null, + "azure.mgmt.loganalytics.operations.DataSourcesOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.DataSourcesOperations.get": null, + "azure.mgmt.loganalytics.operations.DataSourcesOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.DataSourcesOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.GatewaysOperations.delete": null, + "azure.mgmt.loganalytics.aio.operations.GatewaysOperations.delete": null, + "azure.mgmt.loganalytics.operations.IntelligencePacksOperations.disable": null, + "azure.mgmt.loganalytics.aio.operations.IntelligencePacksOperations.disable": null, + "azure.mgmt.loganalytics.operations.IntelligencePacksOperations.enable": null, + "azure.mgmt.loganalytics.aio.operations.IntelligencePacksOperations.enable": null, + "azure.mgmt.loganalytics.operations.IntelligencePacksOperations.list": null, + "azure.mgmt.loganalytics.aio.operations.IntelligencePacksOperations.list": null, + "azure.mgmt.loganalytics.operations.LinkedServicesOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.LinkedServicesOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.operations.LinkedServicesOperations.begin_delete": null, + 
"azure.mgmt.loganalytics.aio.operations.LinkedServicesOperations.begin_delete": null, + "azure.mgmt.loganalytics.operations.LinkedServicesOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.LinkedServicesOperations.get": null, + "azure.mgmt.loganalytics.operations.LinkedServicesOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.LinkedServicesOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.LinkedStorageAccountsOperations.create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.LinkedStorageAccountsOperations.create_or_update": null, + "azure.mgmt.loganalytics.operations.LinkedStorageAccountsOperations.delete": null, + "azure.mgmt.loganalytics.aio.operations.LinkedStorageAccountsOperations.delete": null, + "azure.mgmt.loganalytics.operations.LinkedStorageAccountsOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.LinkedStorageAccountsOperations.get": null, + "azure.mgmt.loganalytics.operations.LinkedStorageAccountsOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.LinkedStorageAccountsOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.ManagementGroupsOperations.list": null, + "azure.mgmt.loganalytics.aio.operations.ManagementGroupsOperations.list": null, + "azure.mgmt.loganalytics.operations.Operations.list": null, + "azure.mgmt.loganalytics.aio.operations.Operations.list": null, + "azure.mgmt.loganalytics.operations.OperationStatusesOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.OperationStatusesOperations.get": null, + "azure.mgmt.loganalytics.operations.QueriesOperations.list": null, + "azure.mgmt.loganalytics.aio.operations.QueriesOperations.list": null, + "azure.mgmt.loganalytics.operations.QueriesOperations.search": null, + "azure.mgmt.loganalytics.aio.operations.QueriesOperations.search": null, + "azure.mgmt.loganalytics.operations.QueriesOperations.get": null, + 
"azure.mgmt.loganalytics.aio.operations.QueriesOperations.get": null, + "azure.mgmt.loganalytics.operations.QueriesOperations.put": null, + "azure.mgmt.loganalytics.aio.operations.QueriesOperations.put": null, + "azure.mgmt.loganalytics.operations.QueriesOperations.update": null, + "azure.mgmt.loganalytics.aio.operations.QueriesOperations.update": null, + "azure.mgmt.loganalytics.operations.QueriesOperations.delete": null, + "azure.mgmt.loganalytics.aio.operations.QueriesOperations.delete": null, + "azure.mgmt.loganalytics.operations.QueryPacksOperations.list": null, + "azure.mgmt.loganalytics.aio.operations.QueryPacksOperations.list": null, + "azure.mgmt.loganalytics.operations.QueryPacksOperations.list_by_resource_group": null, + "azure.mgmt.loganalytics.aio.operations.QueryPacksOperations.list_by_resource_group": null, + "azure.mgmt.loganalytics.operations.QueryPacksOperations.create_or_update_without_name": null, + "azure.mgmt.loganalytics.aio.operations.QueryPacksOperations.create_or_update_without_name": null, + "azure.mgmt.loganalytics.operations.QueryPacksOperations.delete": null, + "azure.mgmt.loganalytics.aio.operations.QueryPacksOperations.delete": null, + "azure.mgmt.loganalytics.operations.QueryPacksOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.QueryPacksOperations.get": null, + "azure.mgmt.loganalytics.operations.QueryPacksOperations.create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.QueryPacksOperations.create_or_update": null, + "azure.mgmt.loganalytics.operations.QueryPacksOperations.update_tags": null, + "azure.mgmt.loganalytics.aio.operations.QueryPacksOperations.update_tags": null, + "azure.mgmt.loganalytics.operations.SavedSearchesOperations.delete": null, + "azure.mgmt.loganalytics.aio.operations.SavedSearchesOperations.delete": null, + "azure.mgmt.loganalytics.operations.SavedSearchesOperations.create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.SavedSearchesOperations.create_or_update": null, 
+ "azure.mgmt.loganalytics.operations.SavedSearchesOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.SavedSearchesOperations.get": null, + "azure.mgmt.loganalytics.operations.SavedSearchesOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.SavedSearchesOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.SchemaOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.SchemaOperations.get": null, + "azure.mgmt.loganalytics.operations.SharedKeysOperations.get_shared_keys": null, + "azure.mgmt.loganalytics.aio.operations.SharedKeysOperations.get_shared_keys": null, + "azure.mgmt.loganalytics.operations.SharedKeysOperations.regenerate": null, + "azure.mgmt.loganalytics.aio.operations.SharedKeysOperations.regenerate": null, + "azure.mgmt.loganalytics.operations.StorageInsightConfigsOperations.create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.StorageInsightConfigsOperations.create_or_update": null, + "azure.mgmt.loganalytics.operations.StorageInsightConfigsOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.StorageInsightConfigsOperations.get": null, + "azure.mgmt.loganalytics.operations.StorageInsightConfigsOperations.delete": null, + "azure.mgmt.loganalytics.aio.operations.StorageInsightConfigsOperations.delete": null, + "azure.mgmt.loganalytics.operations.StorageInsightConfigsOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.StorageInsightConfigsOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.TablesOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.TablesOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.TablesOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.TablesOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.operations.TablesOperations.begin_update": null, + 
"azure.mgmt.loganalytics.aio.operations.TablesOperations.begin_update": null, + "azure.mgmt.loganalytics.operations.TablesOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.TablesOperations.get": null, + "azure.mgmt.loganalytics.operations.TablesOperations.begin_delete": null, + "azure.mgmt.loganalytics.aio.operations.TablesOperations.begin_delete": null, + "azure.mgmt.loganalytics.operations.TablesOperations.migrate": null, + "azure.mgmt.loganalytics.aio.operations.TablesOperations.migrate": null, + "azure.mgmt.loganalytics.operations.TablesOperations.cancel_search": null, + "azure.mgmt.loganalytics.aio.operations.TablesOperations.cancel_search": null, + "azure.mgmt.loganalytics.operations.UsagesOperations.list": null, + "azure.mgmt.loganalytics.aio.operations.UsagesOperations.list": null, + "azure.mgmt.loganalytics.operations.WorkspacePurgeOperations.purge": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacePurgeOperations.purge": null, + "azure.mgmt.loganalytics.operations.WorkspacePurgeOperations.get_purge_status": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacePurgeOperations.get_purge_status": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.list": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.list": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.list_by_resource_group": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.list_by_resource_group": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.begin_delete": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.begin_delete": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.get": null, + 
"azure.mgmt.loganalytics.operations.WorkspacesOperations.update": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.update": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.begin_failover": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.begin_failover": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.begin_failback": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.begin_failback": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.list_nsp": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.list_nsp": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.get_nsp": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.get_nsp": null, + "azure.mgmt.loganalytics.operations.WorkspacesOperations.begin_reconcile_nsp": null, + "azure.mgmt.loganalytics.aio.operations.WorkspacesOperations.begin_reconcile_nsp": null, + "azure.mgmt.loganalytics.operations.DeletedWorkspacesOperations.list": null, + "azure.mgmt.loganalytics.aio.operations.DeletedWorkspacesOperations.list": null, + "azure.mgmt.loganalytics.operations.DeletedWorkspacesOperations.list_by_resource_group": null, + "azure.mgmt.loganalytics.aio.operations.DeletedWorkspacesOperations.list_by_resource_group": null, + "azure.mgmt.loganalytics.operations.SummaryLogsOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.aio.operations.SummaryLogsOperations.list_by_workspace": null, + "azure.mgmt.loganalytics.operations.SummaryLogsOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.aio.operations.SummaryLogsOperations.begin_create_or_update": null, + "azure.mgmt.loganalytics.operations.SummaryLogsOperations.get": null, + "azure.mgmt.loganalytics.aio.operations.SummaryLogsOperations.get": null, + "azure.mgmt.loganalytics.operations.SummaryLogsOperations.begin_delete": null, + 
"azure.mgmt.loganalytics.aio.operations.SummaryLogsOperations.begin_delete": null, + "azure.mgmt.loganalytics.operations.SummaryLogsOperations.begin_start": null, + "azure.mgmt.loganalytics.aio.operations.SummaryLogsOperations.begin_start": null, + "azure.mgmt.loganalytics.operations.SummaryLogsOperations.stop": null, + "azure.mgmt.loganalytics.aio.operations.SummaryLogsOperations.stop": null, + "azure.mgmt.loganalytics.operations.SummaryLogsOperations.begin_retry_bin": null, + "azure.mgmt.loganalytics.aio.operations.SummaryLogsOperations.begin_retry_bin": null + } +} \ No newline at end of file diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/__init__.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/__init__.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/__init__.py index 4353bcf34274..bffaef07aa46 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/__init__.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/__init__.py @@ -5,15 +5,21 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._log_analytics_management_client import LogAnalyticsManagementClient +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._log_analytics_management_client import LogAnalyticsManagementClient # type: ignore from ._version import VERSION __version__ = VERSION try: from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk @@ -21,6 +27,6 @@ __all__ = [ "LogAnalyticsManagementClient", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_configuration.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_configuration.py index 4adc03491fa7..de7edd7a8e1a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_configuration.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_configuration.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, TYPE_CHECKING +from typing import Any, Optional, TYPE_CHECKING from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy @@ -14,7 +14,7 @@ from ._version import VERSION if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports + from azure.core import AzureClouds from azure.core.credentials import TokenCredential @@ -28,9 +28,23 @@ class LogAnalyticsManagementClientConfiguration: # pylint: disable=too-many-ins :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :type cloud_setting: ~azure.core.AzureClouds + :keyword api_version: Api Version. Default value is "2025-07-01". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str """ - def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "2025-07-01") + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: @@ -38,6 +52,8 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs self.credential = credential self.subscription_id = subscription_id + self.cloud_setting = cloud_setting + self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-loganalytics/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py index 7b63f83d07d8..10d56b3cdf61 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_log_analytics_management_client.py @@ -7,17 +7,19 @@ # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, TYPE_CHECKING +from typing import Any, Optional, TYPE_CHECKING, cast from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse +from azure.core.settings import settings from azure.mgmt.core import ARMPipelineClient from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy +from azure.mgmt.core.tools import get_arm_endpoints from . 
import models as _models from ._configuration import LogAnalyticsManagementClientConfiguration -from ._serialization import Deserializer, Serializer +from ._utils.serialization import Deserializer, Serializer from .operations import ( AvailableServiceTiersOperations, ClustersOperations, @@ -37,6 +39,7 @@ SchemaOperations, SharedKeysOperations, StorageInsightConfigsOperations, + SummaryLogsOperations, TablesOperations, UsagesOperations, WorkspacePurgeOperations, @@ -44,21 +47,24 @@ ) if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports + from azure.core import AzureClouds from azure.core.credentials import TokenCredential -class LogAnalyticsManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes +class LogAnalyticsManagementClient: # pylint: disable=too-many-instance-attributes """Operational Insights Client. - :ivar query_packs: QueryPacksOperations operations - :vartype query_packs: azure.mgmt.loganalytics.operations.QueryPacksOperations - :ivar queries: QueriesOperations operations - :vartype queries: azure.mgmt.loganalytics.operations.QueriesOperations + :ivar available_service_tiers: AvailableServiceTiersOperations operations + :vartype available_service_tiers: + azure.mgmt.loganalytics.operations.AvailableServiceTiersOperations + :ivar clusters: ClustersOperations operations + :vartype clusters: azure.mgmt.loganalytics.operations.ClustersOperations :ivar data_exports: DataExportsOperations operations :vartype data_exports: azure.mgmt.loganalytics.operations.DataExportsOperations :ivar data_sources: DataSourcesOperations operations :vartype data_sources: azure.mgmt.loganalytics.operations.DataSourcesOperations + :ivar gateways: GatewaysOperations operations + :vartype gateways: azure.mgmt.loganalytics.operations.GatewaysOperations :ivar intelligence_packs: IntelligencePacksOperations operations :vartype intelligence_packs: azure.mgmt.loganalytics.operations.IntelligencePacksOperations :ivar 
linked_services: LinkedServicesOperations operations @@ -68,42 +74,47 @@ class LogAnalyticsManagementClient: # pylint: disable=client-accepts-api-versio azure.mgmt.loganalytics.operations.LinkedStorageAccountsOperations :ivar management_groups: ManagementGroupsOperations operations :vartype management_groups: azure.mgmt.loganalytics.operations.ManagementGroupsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.loganalytics.operations.Operations :ivar operation_statuses: OperationStatusesOperations operations :vartype operation_statuses: azure.mgmt.loganalytics.operations.OperationStatusesOperations + :ivar queries: QueriesOperations operations + :vartype queries: azure.mgmt.loganalytics.operations.QueriesOperations + :ivar query_packs: QueryPacksOperations operations + :vartype query_packs: azure.mgmt.loganalytics.operations.QueryPacksOperations + :ivar saved_searches: SavedSearchesOperations operations + :vartype saved_searches: azure.mgmt.loganalytics.operations.SavedSearchesOperations + :ivar schema: SchemaOperations operations + :vartype schema: azure.mgmt.loganalytics.operations.SchemaOperations :ivar shared_keys: SharedKeysOperations operations :vartype shared_keys: azure.mgmt.loganalytics.operations.SharedKeysOperations - :ivar usages: UsagesOperations operations - :vartype usages: azure.mgmt.loganalytics.operations.UsagesOperations :ivar storage_insight_configs: StorageInsightConfigsOperations operations :vartype storage_insight_configs: azure.mgmt.loganalytics.operations.StorageInsightConfigsOperations - :ivar saved_searches: SavedSearchesOperations operations - :vartype saved_searches: azure.mgmt.loganalytics.operations.SavedSearchesOperations - :ivar available_service_tiers: AvailableServiceTiersOperations operations - :vartype available_service_tiers: - azure.mgmt.loganalytics.operations.AvailableServiceTiersOperations - :ivar gateways: GatewaysOperations operations - :vartype gateways: 
azure.mgmt.loganalytics.operations.GatewaysOperations - :ivar schema: SchemaOperations operations - :vartype schema: azure.mgmt.loganalytics.operations.SchemaOperations + :ivar tables: TablesOperations operations + :vartype tables: azure.mgmt.loganalytics.operations.TablesOperations + :ivar usages: UsagesOperations operations + :vartype usages: azure.mgmt.loganalytics.operations.UsagesOperations :ivar workspace_purge: WorkspacePurgeOperations operations :vartype workspace_purge: azure.mgmt.loganalytics.operations.WorkspacePurgeOperations - :ivar clusters: ClustersOperations operations - :vartype clusters: azure.mgmt.loganalytics.operations.ClustersOperations - :ivar operations: Operations operations - :vartype operations: azure.mgmt.loganalytics.operations.Operations :ivar workspaces: WorkspacesOperations operations :vartype workspaces: azure.mgmt.loganalytics.operations.WorkspacesOperations :ivar deleted_workspaces: DeletedWorkspacesOperations operations :vartype deleted_workspaces: azure.mgmt.loganalytics.operations.DeletedWorkspacesOperations - :ivar tables: TablesOperations operations - :vartype tables: azure.mgmt.loganalytics.operations.TablesOperations + :ivar summary_logs: SummaryLogsOperations operations + :vartype summary_logs: azure.mgmt.loganalytics.operations.SummaryLogsOperations :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :param base_url: Service URL. Default value is "https://management.azure.com". + :param base_url: Service URL. Default value is None. :type base_url: str + :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :paramtype cloud_setting: ~azure.core.AzureClouds + :keyword api_version: Api Version. Default value is "2025-07-01". 
Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ @@ -112,12 +123,24 @@ def __init__( self, credential: "TokenCredential", subscription_id: str, - base_url: str = "https://management.azure.com", + base_url: Optional[str] = None, + *, + cloud_setting: Optional["AzureClouds"] = None, **kwargs: Any ) -> None: + _cloud = cloud_setting or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) self._config = LogAnalyticsManagementClientConfiguration( - credential=credential, subscription_id=subscription_id, **kwargs + credential=credential, + subscription_id=subscription_id, + cloud_setting=cloud_setting, + credential_scopes=credential_scopes, + **kwargs ) + _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -136,16 +159,19 @@ def __init__( policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, policies=_policies, **kwargs) + self._client: ARMPipelineClient = ARMPipelineClient(base_url=cast(str, base_url), policies=_policies, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.query_packs = QueryPacksOperations(self._client, self._config, self._serialize, self._deserialize) - self.queries = QueriesOperations(self._client, self._config, self._serialize, self._deserialize) + self.available_service_tiers = AvailableServiceTiersOperations( + self._client, 
self._config, self._serialize, self._deserialize + ) + self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) self.data_exports = DataExportsOperations(self._client, self._config, self._serialize, self._deserialize) self.data_sources = DataSourcesOperations(self._client, self._config, self._serialize, self._deserialize) + self.gateways = GatewaysOperations(self._client, self._config, self._serialize, self._deserialize) self.intelligence_packs = IntelligencePacksOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -156,28 +182,26 @@ def __init__( self.management_groups = ManagementGroupsOperations( self._client, self._config, self._serialize, self._deserialize ) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.operation_statuses = OperationStatusesOperations( self._client, self._config, self._serialize, self._deserialize ) + self.queries = QueriesOperations(self._client, self._config, self._serialize, self._deserialize) + self.query_packs = QueryPacksOperations(self._client, self._config, self._serialize, self._deserialize) + self.saved_searches = SavedSearchesOperations(self._client, self._config, self._serialize, self._deserialize) + self.schema = SchemaOperations(self._client, self._config, self._serialize, self._deserialize) self.shared_keys = SharedKeysOperations(self._client, self._config, self._serialize, self._deserialize) - self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.storage_insight_configs = StorageInsightConfigsOperations( self._client, self._config, self._serialize, self._deserialize ) - self.saved_searches = SavedSearchesOperations(self._client, self._config, self._serialize, self._deserialize) - self.available_service_tiers = AvailableServiceTiersOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.gateways = GatewaysOperations(self._client, 
self._config, self._serialize, self._deserialize) - self.schema = SchemaOperations(self._client, self._config, self._serialize, self._deserialize) + self.tables = TablesOperations(self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.workspace_purge = WorkspacePurgeOperations(self._client, self._config, self._serialize, self._deserialize) - self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) - self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) self.deleted_workspaces = DeletedWorkspacesOperations( self._client, self._config, self._serialize, self._deserialize ) - self.tables = TablesOperations(self._client, self._config, self._serialize, self._deserialize) + self.summary_logs = SummaryLogsOperations(self._client, self._config, self._serialize, self._deserialize) def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_utils/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_utils/__init__.py new file mode 100644 index 000000000000..0af9b28f6607 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_serialization.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_utils/serialization.py similarity index 82% rename from sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_serialization.py rename to sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_utils/serialization.py index 8139854b97bb..ff543ed937ff 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_serialization.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_utils/serialization.py @@ -1,30 +1,12 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 # -------------------------------------------------------------------------- -# # Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -# pylint: skip-file # pyright: reportUnnecessaryTypeIgnoreComment=false from base64 import b64decode, b64encode @@ -39,7 +21,6 @@ import sys import codecs from typing import ( - Dict, Any, cast, Optional, @@ -48,11 +29,7 @@ IO, Mapping, Callable, - TypeVar, MutableMapping, - Type, - List, - Mapping, ) try: @@ -62,13 +39,13 @@ import xml.etree.ElementTree as ET import isodate # type: ignore +from typing_extensions import Self from azure.core.exceptions import DeserializationError, SerializationError from azure.core.serialization import NULL as CoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") -ModelType = TypeVar("ModelType", bound="Model") JSON = MutableMapping[str, Any] @@ -91,6 +68,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: :param data: Input, could be bytes or stream (will be decoded with UTF8) or text :type data: str or bytes or IO :param str content_type: The content type. + :return: The deserialized data. 
+ :rtype: object """ if hasattr(data, "read"): # Assume a stream @@ -112,7 +91,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: try: return json.loads(data_as_str) except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) + raise DeserializationError("JSON is invalid: {}".format(err), err) from err elif "xml" in (content_type or []): try: @@ -155,6 +134,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], Use bytes and headers to NOT use any requests/aiohttp or whatever specific implementation. Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object """ # Try to use content-type from headers if available content_type = None @@ -179,80 +163,31 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], except NameError: _long_type = int - -class UTC(datetime.tzinfo): - """Time Zone info for handling UTC""" - - def utcoffset(self, dt): - """UTF offset for UTC is 0.""" - return datetime.timedelta(0) - - def tzname(self, dt): - """Timestamp representation.""" - return "Z" - - def dst(self, dt): - """No daylight saving for UTC.""" - return datetime.timedelta(hours=1) - - -try: - from datetime import timezone as _FixedOffset # type: ignore -except ImportError: # Python 2.7 - - class _FixedOffset(datetime.tzinfo): # type: ignore - """Fixed offset in minutes east from UTC. 
- Copy/pasted from Python doc - :param datetime.timedelta offset: offset in timedelta format - """ - - def __init__(self, offset): - self.__offset = offset - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return str(self.__offset.total_seconds() / 3600) - - def __repr__(self): - return "".format(self.tzname(None)) - - def dst(self, dt): - return datetime.timedelta(0) - - def __getinitargs__(self): - return (self.__offset,) - - -try: - from datetime import timezone - - TZ_UTC = timezone.utc -except ImportError: - TZ_UTC = UTC() # type: ignore +TZ_UTC = datetime.timezone.utc _FLATTEN = re.compile(r"(? None: - self.additional_properties: Optional[Dict[str, Any]] = {} - for k in kwargs: + self.additional_properties: Optional[dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) elif k in self._validation and self._validation[k].get("readonly", False): @@ -300,13 +242,23 @@ def __init__(self, **kwargs: Any) -> None: setattr(self, k, kwargs[k]) def __eq__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False def __ne__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ return not self.__eq__(other) def __str__(self) -> str: @@ -326,7 +278,11 @@ def is_xml_model(cls) -> bool: @classmethod def _create_xml_node(cls): - """Create XML node.""" + """Create XML node. 
+ + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ try: xml_map = cls._xml_map # type: ignore except AttributeError: @@ -346,12 +302,14 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) def as_dict( self, keep_readonly: bool = True, - key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, **kwargs: Any ) -> JSON: """Return a dict that can be serialized using json.dump. @@ -380,12 +338,15 @@ def my_key_transformer(key, attr_desc, value): If you want XML serialization, you can pass the kwargs is_xml=True. + :param bool keep_readonly: If you want to serialize the readonly attributes :param function key_transformer: A key transformer function. :returns: A dict JSON compatible object :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) @classmethod def _infer_class_models(cls): @@ -395,30 +356,31 @@ def _infer_class_models(cls): client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} if cls.__name__ not in client_models: raise ValueError("Not Autorest generated code") - except Exception: + except Exception: # pylint: disable=broad-exception-caught # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. 
client_models = {cls.__name__: cls} return client_models @classmethod - def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def from_dict( - cls: Type[ModelType], + cls, data: Any, - key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, content_type: Optional[str] = None, - ) -> ModelType: + ) -> Self: """Parse a dict using given key extractor return a model. By default consider key @@ -426,9 +388,11 @@ def from_dict( and last_rest_key_case_insensitive_extractor) :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. 
:returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) deserializer.key_extractors = ( # type: ignore @@ -448,21 +412,25 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access return result @classmethod def _classify(cls, response, objects): """Check the class _subtype_map for any child classes. We want to ignore any inherited _subtype_maps. - Remove the polymorphic key from the initial data. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class """ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): subtype_value = None if not isinstance(response, ET.Element): rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] - subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) else: subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) if subtype_value: @@ -501,11 +469,13 @@ def _decode_attribute_map_key(key): inside the received data. 
:param str key: A key string from the generated code + :returns: The decoded key + :rtype: str """ return key.replace("\\.", ".") -class Serializer(object): +class Serializer: # pylint: disable=too-many-public-methods """Request object model serializer.""" basic_types = {str: "str", int: "int", bool: "bool", float: "float"} @@ -540,7 +510,7 @@ class Serializer(object): "multiple": lambda x, y: x % y != 0, } - def __init__(self, classes: Optional[Mapping[str, type]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.serialize_type = { "iso-8601": Serializer.serialize_iso, "rfc-1123": Serializer.serialize_rfc, @@ -556,17 +526,20 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None): "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True - def _serialize(self, target_obj, data_type=None, **kwargs): + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): """Serialize data into a string according to type. - :param target_obj: The data to be serialized. + :param object target_obj: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str, dict - :raises: SerializationError if serialization fails. + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
""" key_transformer = kwargs.get("key_transformer", self.key_transformer) keep_readonly = kwargs.get("keep_readonly", False) @@ -592,17 +565,19 @@ def _serialize(self, target_obj, data_type=None, **kwargs): serialized = {} if is_xml_model_serialization: - serialized = target_obj._create_xml_node() + serialized = target_obj._create_xml_node() # pylint: disable=protected-access try: - attributes = target_obj._attribute_map + attributes = target_obj._attribute_map # pylint: disable=protected-access for attr, attr_desc in attributes.items(): attr_name = attr - if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): continue if attr_name == "additional_properties" and attr_desc["key"] == "": if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) + serialized |= target_obj.additional_properties continue try: @@ -633,7 +608,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if isinstance(new_attr, list): serialized.extend(new_attr) # type: ignore elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. if "name" not in getattr(orig_attr, "_xml_map", {}): splitted_tag = new_attr.tag.split("}") if len(splitted_tag) == 2: # Namespace @@ -664,17 +640,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs): except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) raise SerializationError(msg) from err - else: - return serialized + return serialized def body(self, data, data_type, **kwargs): """Serialize data intended for a request body. 
- :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict - :raises: SerializationError if serialization fails. - :raises: ValueError if data is None + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body """ # Just in case this is a dict @@ -703,7 +679,7 @@ def body(self, data, data_type, **kwargs): attribute_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, ] - data = deserializer._deserialize(data_type, data) + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access except DeserializationError as err: raise SerializationError("Unable to build a model: " + str(err)) from err @@ -712,11 +688,13 @@ def body(self, data, data_type, **kwargs): def url(self, name, data, data_type, **kwargs): """Serialize data intended for a URL path. - :param data: The data to be serialized. + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :returns: The serialized URL path + :raises TypeError: if serialization fails. + :raises ValueError: if data is None """ try: output = self.serialize_data(data, data_type, **kwargs) @@ -728,21 +706,20 @@ def url(self, name, data, data_type, **kwargs): output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return output + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output def query(self, name, data, data_type, **kwargs): """Serialize data intended for a URL query. 
- :param data: The data to be serialized. + :param str name: The name of the query parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :keyword bool skip_quote: Whether to skip quote the serialized result. - Defaults to False. :rtype: str, list - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter """ try: # Treat the list aside, since we don't want to encode the div separator @@ -759,19 +736,20 @@ def query(self, name, data, data_type, **kwargs): output = str(output) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def header(self, name, data, data_type, **kwargs): """Serialize data intended for a request header. - :param data: The data to be serialized. + :param str name: The name of the header. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header """ try: if data_type in ["[str]"]: @@ -780,21 +758,20 @@ def header(self, name, data, data_type, **kwargs): output = self.serialize_data(data, data_type, **kwargs) if data_type == "bool": output = json.dumps(output) - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def serialize_data(self, data, data_type, **kwargs): """Serialize generic data according to supplied data type. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :param bool required: Whether it's essential that the data not be - empty or None - :raises: AttributeError if required data is None. - :raises: ValueError if data is None - :raises: SerializationError if serialization fails. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ :rtype: str, int, float, bool, dict, list """ if data is None: raise ValueError("No value for given attribute") @@ -805,12 +782,12 @@ def serialize_data(self, data, data_type, **kwargs): if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) - elif data_type in self.serialize_type: + if data_type in self.serialize_type: return self.serialize_type[data_type](data, **kwargs) # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) @@ -821,11 +798,10 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." raise SerializationError(msg.format(data, data_type)) from err - else: - return self._serialize(data, **kwargs) + return self._serialize(data, **kwargs) @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) if custom_serializer: return custom_serializer @@ -841,23 +817,26 @@ def serialize_basic(cls, data, data_type, **kwargs): - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - is_xml bool : If set, use xml_basic_types_serializers - :param data: Object to be serialized. + :param obj data: Object to be serialized. :param str data_type: Type of object in the iterable. 
+ :rtype: str, int, float, bool + :return: serialized object """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec + return eval(data_type)(data) # nosec # pylint: disable=eval-used @classmethod def serialize_unicode(cls, data): """Special handling for serializing unicode strings in Py2. Encode to UTF-8 if unicode, otherwise handle as a str. - :param data: Object to be serialized. + :param str data: Object to be serialized. :rtype: str + :return: serialized object """ try: # If I received an enum, return its value return data.value @@ -871,8 +850,7 @@ def serialize_unicode(cls, data): return data except NameError: return str(data) - else: - return str(data) + return str(data) def serialize_iter(self, data, iter_type, div=None, **kwargs): """Serialize iterable. @@ -882,15 +860,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): serialization_ctxt['type'] should be same as data_type. - is_xml bool : If set, serialize as XML - :param list attr: Object to be serialized. + :param list data: Object to be serialized. :param str iter_type: Type of object in the iterable. - :param bool required: Whether the objects in the iterable must - not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. - :keyword bool do_quote: Whether to quote the serialized result of each iterable element. Defaults to False. :rtype: list, str + :return: serialized iterable """ if isinstance(data, str): raise SerializationError("Refuse str type as a valid iter type.") @@ -945,9 +921,8 @@ def serialize_dict(self, attr, dict_type, **kwargs): :param dict attr: Object to be serialized. :param str dict_type: Type of object in the dictionary. 
- :param bool required: Whether the objects in the dictionary must - not be None or empty. :rtype: dict + :return: serialized dictionary """ serialization_ctxt = kwargs.get("serialization_ctxt", {}) serialized = {} @@ -971,7 +946,7 @@ def serialize_dict(self, attr, dict_type, **kwargs): return serialized - def serialize_object(self, attr, **kwargs): + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Serialize a generic object. This will be handled as a dictionary. If object passed in is not a basic type (str, int, float, dict, list) it will simply be @@ -979,6 +954,7 @@ def serialize_object(self, attr, **kwargs): :param dict attr: Object to be serialized. :rtype: dict or str + :return: serialized object """ if attr is None: return None @@ -1003,7 +979,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_decimal(attr) # If it's a model or I know this dependency, serialize as a Model - elif obj_type in self.dependencies.values() or isinstance(attr, Model): + if obj_type in self.dependencies.values() or isinstance(attr, Model): return self._serialize(attr) if obj_type == dict: @@ -1034,56 +1010,61 @@ def serialize_enum(attr, enum_obj=None): try: enum_obj(result) # type: ignore return result - except ValueError: + except ValueError as exc: for enum_value in enum_obj: # type: ignore if enum_value.value.lower() == str(attr).lower(): return enum_value.value error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) + raise SerializationError(error.format(attr, enum_obj)) from exc @staticmethod - def serialize_bytearray(attr, **kwargs): + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument """Serialize bytearray into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. 
:rtype: str + :return: serialized base64 """ return b64encode(attr).decode() @staticmethod - def serialize_base64(attr, **kwargs): + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument """Serialize str into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ encoded = b64encode(attr).decode("ascii") return encoded.strip("=").replace("+", "-").replace("/", "_") @staticmethod - def serialize_decimal(attr, **kwargs): + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument """Serialize Decimal object to float. - :param attr: Object to be serialized. + :param decimal attr: Object to be serialized. :rtype: float + :return: serialized decimal """ return float(attr) @staticmethod - def serialize_long(attr, **kwargs): + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument """Serialize long (Py2) or int (Py3). - :param attr: Object to be serialized. + :param int attr: Object to be serialized. :rtype: int/long + :return: serialized long """ return _long_type(attr) @staticmethod - def serialize_date(attr, **kwargs): + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument """Serialize Date object into ISO-8601 formatted string. :param Date attr: Object to be serialized. :rtype: str + :return: serialized date """ if isinstance(attr, str): attr = isodate.parse_date(attr) @@ -1091,11 +1072,12 @@ def serialize_date(attr, **kwargs): return t @staticmethod - def serialize_time(attr, **kwargs): + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument """Serialize Time object into ISO-8601 formatted string. :param datetime.time attr: Object to be serialized. 
:rtype: str + :return: serialized time """ if isinstance(attr, str): attr = isodate.parse_time(attr) @@ -1105,30 +1087,32 @@ def serialize_time(attr, **kwargs): return t @staticmethod - def serialize_duration(attr, **kwargs): + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument """Serialize TimeDelta object into ISO-8601 formatted string. :param TimeDelta attr: Object to be serialized. :rtype: str + :return: serialized duration """ if isinstance(attr, str): attr = isodate.parse_duration(attr) return isodate.duration_isoformat(attr) @staticmethod - def serialize_rfc(attr, **kwargs): + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into RFC-1123 formatted string. :param Datetime attr: Object to be serialized. :rtype: str - :raises: TypeError if format invalid. + :raises TypeError: if format invalid. + :return: serialized rfc """ try: if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() - except AttributeError: - raise TypeError("RFC1123 object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( Serializer.days[utc.tm_wday], @@ -1141,12 +1125,13 @@ def serialize_rfc(attr, **kwargs): ) @staticmethod - def serialize_iso(attr, **kwargs): + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str - :raises: SerializationError if format invalid. + :raises SerializationError: if format invalid. 
+ :return: serialized iso """ if isinstance(attr, str): attr = isodate.parse_datetime(attr) @@ -1172,13 +1157,14 @@ def serialize_iso(attr, **kwargs): raise TypeError(msg) from err @staticmethod - def serialize_unix(attr, **kwargs): + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into IntTime format. This is represented as seconds. :param Datetime attr: Object to be serialized. :rtype: int - :raises: SerializationError if format invalid + :raises SerializationError: if format invalid + :return: serialized unix """ if isinstance(attr, int): return attr @@ -1186,17 +1172,17 @@ def serialize_unix(attr, **kwargs): if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") return int(calendar.timegm(attr.utctimetuple())) - except AttributeError: - raise TypeError("Unix time object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc -def rest_key_extractor(attr, attr_desc, data): +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument key = attr_desc["key"] working_data = data while "." 
in key: # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(List[str], _FLATTEN.split(key)) + dict_keys = cast(list[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1211,7 +1197,9 @@ def rest_key_extractor(attr, attr_desc, data): return working_data.get(key) -def rest_key_case_insensitive_extractor(attr, attr_desc, data): +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): key = attr_desc["key"] working_data = data @@ -1232,17 +1220,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): return attribute_key_case_insensitive_extractor(key, None, working_data) -def last_rest_key_extractor(attr, attr_desc, data): - """Extract the attribute in "data" based on the last part of the JSON path key.""" +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) return attribute_key_extractor(dict_keys[-1], None, data) -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument """Extract the attribute in "data" based on the last part of the JSON path key. 
This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) @@ -1279,7 +1279,7 @@ def _extract_name_from_internal_type(internal_type): return xml_name -def xml_key_extractor(attr, attr_desc, data): +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements if isinstance(data, dict): return None @@ -1331,22 +1331,21 @@ def xml_key_extractor(attr, attr_desc, data): if is_iter_type: if is_wrapped: return None # is_wrapped no node, we want None - else: - return [] # not wrapped, assume empty list + return [] # not wrapped, assume empty list return None # Assume it's not there, maybe an optional node. # If is_iter_type and not wrapped, return all found children if is_iter_type: if not is_wrapped: return children - else: # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( - xml_name - ) + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name ) - return list(children[0]) # Might be empty list and that's ok. + ) + return list(children[0]) # Might be empty list and that's ok. # Here it's not a itertype, we should have found one element only or empty if len(children) > 1: @@ -1354,7 +1353,7 @@ def xml_key_extractor(attr, attr_desc, data): return children[0] -class Deserializer(object): +class Deserializer: """Response object model deserializer. 
:param dict classes: Class type dictionary for deserializing complex types. @@ -1363,9 +1362,9 @@ class Deserializer(object): basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - def __init__(self, classes: Optional[Mapping[str, type]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.deserialize_type = { "iso-8601": Deserializer.deserialize_iso, "rfc-1123": Deserializer.deserialize_rfc, @@ -1385,7 +1384,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None): "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much @@ -1401,27 +1400,29 @@ def __call__(self, target_obj, response_data, content_type=None): :param str target_obj: Target data type to deserialize to. :param requests.Response response_data: REST response object. :param str content_type: Swagger "produces" if available. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. + :rtype: object """ data = self._unpack_content(response_data, content_type) return self._deserialize(target_obj, data) - def _deserialize(self, target_obj, data): + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements """Call the deserializer on a model. 
Data needs to be already deserialized as JSON or XML ElementTree :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. + :rtype: object """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] try: - for attr, mapconfig in data._attribute_map.items(): + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access if attr in constants: continue value = getattr(data, attr) @@ -1440,13 +1441,13 @@ def _deserialize(self, target_obj, data): if isinstance(response, str): return self.deserialize_data(data, response) - elif isinstance(response, type) and issubclass(response, Enum): + if isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) if data is None or data is CoreNull: return data try: - attributes = response._attribute_map # type: ignore + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
@@ -1476,9 +1477,8 @@ def _deserialize(self, target_obj, data): except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore raise DeserializationError(msg) from err - else: - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) def _build_additional_properties(self, attribute_map, data): if not self.additional_properties_detection: @@ -1505,6 +1505,8 @@ def _classify_target(self, target, data): :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple """ if target is None: return None, None @@ -1516,7 +1518,7 @@ def _classify_target(self, target, data): return target, target try: - target = target._classify(data, self.dependencies) # type: ignore + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ # type: ignore @@ -1531,10 +1533,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): :param str target_obj: The target object type to deserialize to. :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object """ try: return self(target_obj, data, content_type=content_type) - except: + except: # pylint: disable=bare-except _LOGGER.debug( "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True ) @@ -1552,10 +1556,12 @@ def _unpack_content(raw_data, content_type=None): If raw_data is something else, bypass all logic and return it directly. - :param raw_data: Data to be processed. - :param content_type: How to parse if raw_data is a string/bytes. + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. """ # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) @@ -1579,24 +1585,35 @@ def _unpack_content(raw_data, content_type=None): def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. - :param response: The response model class. - :param d_attrs: The deserialized response attributes. + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
""" if callable(response): subtype = getattr(response, "_subtype_map", {}) try: - readonly = [k for k, v in response._validation.items() if v.get("readonly")] - const = [k for k, v in response._validation.items() if v.get("constant")] + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: setattr(response_obj, attr, attrs.get(attr)) if additional_properties: - response_obj.additional_properties = additional_properties + response_obj.additional_properties = additional_properties # type: ignore return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) + raise DeserializationError(msg + str(err)) from err else: try: for attr, value in attrs.items(): @@ -1605,15 +1622,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None): except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) + raise DeserializationError(msg) from exp - def deserialize_data(self, data, data_type): + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. 
+ :rtype: object """ if data is None: return data @@ -1627,7 +1645,11 @@ def deserialize_data(self, data, data_type): if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data - is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) @@ -1647,14 +1669,14 @@ def deserialize_data(self, data, data_type): msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) raise DeserializationError(msg) from err - else: - return self._deserialize(obj_type, data) + return self._deserialize(obj_type, data) def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. :rtype: list """ if attr is None: @@ -1671,6 +1693,7 @@ def deserialize_dict(self, attr, dict_type): :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. :rtype: dict """ if isinstance(attr, list): @@ -1681,13 +1704,14 @@ def deserialize_dict(self, attr, dict_type): attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - def deserialize_object(self, attr, **kwargs): + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. :rtype: dict - :raises: TypeError if non-builtin datatype encountered. 
+ :raises TypeError: if non-builtin datatype encountered. """ if attr is None: return None @@ -1720,11 +1744,10 @@ def deserialize_object(self, attr, **kwargs): pass return deserialized - else: - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) - def deserialize_basic(self, attr, data_type): + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements """Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as @@ -1732,8 +1755,9 @@ def deserialize_basic(self, attr, data_type): :param str attr: response string to be deserialized. :param str data_type: deserialization data type. + :return: Deserialized basic type. :rtype: str, int, float or bool - :raises: TypeError if string format is not valid. + :raises TypeError: if string format is not valid. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1743,24 +1767,23 @@ def deserialize_basic(self, attr, data_type): if data_type == "str": # None or '', node is empty string. return "" - else: - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None + # None or '', node with a strong type is None. 
+ # Don't try to model "empty bool" or "empty int" + return None if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, str): + if isinstance(attr, str): if attr.lower() in ["true", "1"]: return True - elif attr.lower() in ["false", "0"]: + if attr.lower() in ["false", "0"]: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec + return eval(data_type)(attr) # nosec # pylint: disable=eval-used @staticmethod def deserialize_unicode(data): @@ -1768,6 +1791,7 @@ def deserialize_unicode(data): as a string. :param str data: response string to be deserialized. + :return: Deserialized string. :rtype: str or unicode """ # We might be here because we have an enum modeled as string, @@ -1781,8 +1805,7 @@ def deserialize_unicode(data): return data except NameError: return str(data) - else: - return str(data) + return str(data) @staticmethod def deserialize_enum(data, enum_obj): @@ -1794,6 +1817,7 @@ def deserialize_enum(data, enum_obj): :param str data: Response string to be deserialized. If this value is None or invalid it will be returned as-is. :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. :rtype: Enum """ if isinstance(data, enum_obj) or data is None: @@ -1804,9 +1828,9 @@ def deserialize_enum(data, enum_obj): # Workaround. We might consider remove it in the future. try: return list(enum_obj.__members__.values())[data] - except IndexError: + except IndexError as exc: error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) + raise DeserializationError(error.format(data, enum_obj)) from exc try: return enum_obj(str(data)) except ValueError: @@ -1822,8 +1846,9 @@ def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. 
+ :return: Deserialized bytearray :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1834,8 +1859,9 @@ def deserialize_base64(attr): """Deserialize base64 encoded string into string. :param str attr: response string to be deserialized. + :return: Deserialized base64 string :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1849,8 +1875,9 @@ def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. - :rtype: Decimal - :raises: DeserializationError if string format invalid. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal """ if isinstance(attr, ET.Element): attr = attr.text @@ -1865,8 +1892,9 @@ def deserialize_long(attr): """Deserialize string into long (Py2) or int (Py3). :param str attr: response string to be deserialized. + :return: Deserialized int :rtype: long or int - :raises: ValueError if string format invalid. + :raises ValueError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1877,8 +1905,9 @@ def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. + :return: Deserialized duration :rtype: TimeDelta - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1887,16 +1916,16 @@ def deserialize_duration(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." 
raise DeserializationError(msg) from err - else: - return duration + return duration @staticmethod def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. + :return: Deserialized date :rtype: Date - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1910,8 +1939,9 @@ def deserialize_time(attr): """Deserialize ISO-8601 formatted string into time object. :param str attr: response string to be deserialized. + :return: Deserialized time :rtype: datetime.time - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1924,31 +1954,32 @@ def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: parsed_date = email.utils.parsedate_tz(attr) # type: ignore date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) ) if not date_obj.tzinfo: date_obj = date_obj.astimezone(tz=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. 
+ :return: Deserialized ISO datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1976,8 +2007,7 @@ def deserialize_iso(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_unix(attr): @@ -1985,8 +2015,9 @@ def deserialize_unix(attr): This is represented as seconds. :param int attr: Object to be serialized. + :return: Deserialized datetime :rtype: Datetime - :raises: DeserializationError if format invalid + :raises DeserializationError: if format invalid """ if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore @@ -1996,5 +2027,4 @@ def deserialize_unix(attr): except ValueError as err: msg = "Cannot deserialize to unix datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py index e43de950c09c..9bd4c9b55137 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "13.0.0b7" +VERSION = "0.0.0" diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/__init__.py index 39a87b28a9bc..f8722e6aa42d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/__init__.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/__init__.py @@ -5,12 +5,18 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._log_analytics_management_client import LogAnalyticsManagementClient +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._log_analytics_management_client import LogAnalyticsManagementClient # type: ignore try: from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk @@ -18,6 +24,6 @@ __all__ = [ "LogAnalyticsManagementClient", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_configuration.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_configuration.py index 7d70ad5d4b55..2e5b27b3c1e9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_configuration.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_configuration.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if 
the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, TYPE_CHECKING +from typing import Any, Optional, TYPE_CHECKING from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy @@ -14,7 +14,7 @@ from .._version import VERSION if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports + from azure.core import AzureClouds from azure.core.credentials_async import AsyncTokenCredential @@ -28,9 +28,23 @@ class LogAnalyticsManagementClientConfiguration: # pylint: disable=too-many-ins :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :type cloud_setting: ~azure.core.AzureClouds + :keyword api_version: Api Version. Default value is "2025-07-01". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str """ - def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "2025-07-01") + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: @@ -38,6 +52,8 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k self.credential = credential self.subscription_id = subscription_id + self.cloud_setting = cloud_setting + self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-loganalytics/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py index 12fcfbaab165..261354ce9547 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/_log_analytics_management_client.py @@ -7,16 +7,18 @@ # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, Awaitable, TYPE_CHECKING +from typing import Any, Awaitable, Optional, TYPE_CHECKING, cast from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.settings import settings from azure.mgmt.core import AsyncARMPipelineClient from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy +from 
azure.mgmt.core.tools import get_arm_endpoints from .. import models as _models -from .._serialization import Deserializer, Serializer +from .._utils.serialization import Deserializer, Serializer from ._configuration import LogAnalyticsManagementClientConfiguration from .operations import ( AvailableServiceTiersOperations, @@ -37,6 +39,7 @@ SchemaOperations, SharedKeysOperations, StorageInsightConfigsOperations, + SummaryLogsOperations, TablesOperations, UsagesOperations, WorkspacePurgeOperations, @@ -44,21 +47,24 @@ ) if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports + from azure.core import AzureClouds from azure.core.credentials_async import AsyncTokenCredential -class LogAnalyticsManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes +class LogAnalyticsManagementClient: # pylint: disable=too-many-instance-attributes """Operational Insights Client. - :ivar query_packs: QueryPacksOperations operations - :vartype query_packs: azure.mgmt.loganalytics.aio.operations.QueryPacksOperations - :ivar queries: QueriesOperations operations - :vartype queries: azure.mgmt.loganalytics.aio.operations.QueriesOperations + :ivar available_service_tiers: AvailableServiceTiersOperations operations + :vartype available_service_tiers: + azure.mgmt.loganalytics.aio.operations.AvailableServiceTiersOperations + :ivar clusters: ClustersOperations operations + :vartype clusters: azure.mgmt.loganalytics.aio.operations.ClustersOperations :ivar data_exports: DataExportsOperations operations :vartype data_exports: azure.mgmt.loganalytics.aio.operations.DataExportsOperations :ivar data_sources: DataSourcesOperations operations :vartype data_sources: azure.mgmt.loganalytics.aio.operations.DataSourcesOperations + :ivar gateways: GatewaysOperations operations + :vartype gateways: azure.mgmt.loganalytics.aio.operations.GatewaysOperations :ivar intelligence_packs: IntelligencePacksOperations operations :vartype intelligence_packs: 
azure.mgmt.loganalytics.aio.operations.IntelligencePacksOperations :ivar linked_services: LinkedServicesOperations operations @@ -68,42 +74,47 @@ class LogAnalyticsManagementClient: # pylint: disable=client-accepts-api-versio azure.mgmt.loganalytics.aio.operations.LinkedStorageAccountsOperations :ivar management_groups: ManagementGroupsOperations operations :vartype management_groups: azure.mgmt.loganalytics.aio.operations.ManagementGroupsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.loganalytics.aio.operations.Operations :ivar operation_statuses: OperationStatusesOperations operations :vartype operation_statuses: azure.mgmt.loganalytics.aio.operations.OperationStatusesOperations + :ivar queries: QueriesOperations operations + :vartype queries: azure.mgmt.loganalytics.aio.operations.QueriesOperations + :ivar query_packs: QueryPacksOperations operations + :vartype query_packs: azure.mgmt.loganalytics.aio.operations.QueryPacksOperations + :ivar saved_searches: SavedSearchesOperations operations + :vartype saved_searches: azure.mgmt.loganalytics.aio.operations.SavedSearchesOperations + :ivar schema: SchemaOperations operations + :vartype schema: azure.mgmt.loganalytics.aio.operations.SchemaOperations :ivar shared_keys: SharedKeysOperations operations :vartype shared_keys: azure.mgmt.loganalytics.aio.operations.SharedKeysOperations - :ivar usages: UsagesOperations operations - :vartype usages: azure.mgmt.loganalytics.aio.operations.UsagesOperations :ivar storage_insight_configs: StorageInsightConfigsOperations operations :vartype storage_insight_configs: azure.mgmt.loganalytics.aio.operations.StorageInsightConfigsOperations - :ivar saved_searches: SavedSearchesOperations operations - :vartype saved_searches: azure.mgmt.loganalytics.aio.operations.SavedSearchesOperations - :ivar available_service_tiers: AvailableServiceTiersOperations operations - :vartype available_service_tiers: - 
azure.mgmt.loganalytics.aio.operations.AvailableServiceTiersOperations - :ivar gateways: GatewaysOperations operations - :vartype gateways: azure.mgmt.loganalytics.aio.operations.GatewaysOperations - :ivar schema: SchemaOperations operations - :vartype schema: azure.mgmt.loganalytics.aio.operations.SchemaOperations + :ivar tables: TablesOperations operations + :vartype tables: azure.mgmt.loganalytics.aio.operations.TablesOperations + :ivar usages: UsagesOperations operations + :vartype usages: azure.mgmt.loganalytics.aio.operations.UsagesOperations :ivar workspace_purge: WorkspacePurgeOperations operations :vartype workspace_purge: azure.mgmt.loganalytics.aio.operations.WorkspacePurgeOperations - :ivar clusters: ClustersOperations operations - :vartype clusters: azure.mgmt.loganalytics.aio.operations.ClustersOperations - :ivar operations: Operations operations - :vartype operations: azure.mgmt.loganalytics.aio.operations.Operations :ivar workspaces: WorkspacesOperations operations :vartype workspaces: azure.mgmt.loganalytics.aio.operations.WorkspacesOperations :ivar deleted_workspaces: DeletedWorkspacesOperations operations :vartype deleted_workspaces: azure.mgmt.loganalytics.aio.operations.DeletedWorkspacesOperations - :ivar tables: TablesOperations operations - :vartype tables: azure.mgmt.loganalytics.aio.operations.TablesOperations + :ivar summary_logs: SummaryLogsOperations operations + :vartype summary_logs: azure.mgmt.loganalytics.aio.operations.SummaryLogsOperations :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :param base_url: Service URL. Default value is "https://management.azure.com". + :param base_url: Service URL. Default value is None. :type base_url: str + :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. 
Default value is + None. + :paramtype cloud_setting: ~azure.core.AzureClouds + :keyword api_version: Api Version. Default value is "2025-07-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ @@ -112,12 +123,24 @@ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, - base_url: str = "https://management.azure.com", + base_url: Optional[str] = None, + *, + cloud_setting: Optional["AzureClouds"] = None, **kwargs: Any ) -> None: + _cloud = cloud_setting or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) self._config = LogAnalyticsManagementClientConfiguration( - credential=credential, subscription_id=subscription_id, **kwargs + credential=credential, + subscription_id=subscription_id, + cloud_setting=cloud_setting, + credential_scopes=credential_scopes, + **kwargs ) + _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -136,16 +159,21 @@ def __init__( policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, policies=_policies, **kwargs) + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient( + base_url=cast(str, base_url), policies=_policies, **kwargs + ) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.query_packs = QueryPacksOperations(self._client, self._config, self._serialize, self._deserialize) - 
self.queries = QueriesOperations(self._client, self._config, self._serialize, self._deserialize) + self.available_service_tiers = AvailableServiceTiersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) self.data_exports = DataExportsOperations(self._client, self._config, self._serialize, self._deserialize) self.data_sources = DataSourcesOperations(self._client, self._config, self._serialize, self._deserialize) + self.gateways = GatewaysOperations(self._client, self._config, self._serialize, self._deserialize) self.intelligence_packs = IntelligencePacksOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -156,28 +184,26 @@ def __init__( self.management_groups = ManagementGroupsOperations( self._client, self._config, self._serialize, self._deserialize ) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.operation_statuses = OperationStatusesOperations( self._client, self._config, self._serialize, self._deserialize ) + self.queries = QueriesOperations(self._client, self._config, self._serialize, self._deserialize) + self.query_packs = QueryPacksOperations(self._client, self._config, self._serialize, self._deserialize) + self.saved_searches = SavedSearchesOperations(self._client, self._config, self._serialize, self._deserialize) + self.schema = SchemaOperations(self._client, self._config, self._serialize, self._deserialize) self.shared_keys = SharedKeysOperations(self._client, self._config, self._serialize, self._deserialize) - self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.storage_insight_configs = StorageInsightConfigsOperations( self._client, self._config, self._serialize, self._deserialize ) - self.saved_searches = SavedSearchesOperations(self._client, self._config, self._serialize, self._deserialize) - 
self.available_service_tiers = AvailableServiceTiersOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.gateways = GatewaysOperations(self._client, self._config, self._serialize, self._deserialize) - self.schema = SchemaOperations(self._client, self._config, self._serialize, self._deserialize) + self.tables = TablesOperations(self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.workspace_purge = WorkspacePurgeOperations(self._client, self._config, self._serialize, self._deserialize) - self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) - self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) self.deleted_workspaces = DeletedWorkspacesOperations( self._client, self._config, self._serialize, self._deserialize ) - self.tables = TablesOperations(self._client, self._config, self._serialize, self._deserialize) + self.summary_logs = SummaryLogsOperations(self._client, self._config, self._serialize, self._deserialize) def _send_request( self, request: HttpRequest, *, stream: bool = False, **kwargs: Any diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/__init__.py index 7364a7f5d3b9..501c3ecac6e4 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/__init__.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/__init__.py @@ -5,57 +5,65 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._query_packs_operations import QueryPacksOperations -from ._queries_operations import QueriesOperations -from ._data_exports_operations import DataExportsOperations -from ._data_sources_operations import DataSourcesOperations -from ._intelligence_packs_operations import IntelligencePacksOperations -from ._linked_services_operations import LinkedServicesOperations -from ._linked_storage_accounts_operations import LinkedStorageAccountsOperations -from ._management_groups_operations import ManagementGroupsOperations -from ._operation_statuses_operations import OperationStatusesOperations -from ._shared_keys_operations import SharedKeysOperations -from ._usages_operations import UsagesOperations -from ._storage_insight_configs_operations import StorageInsightConfigsOperations -from ._saved_searches_operations import SavedSearchesOperations -from ._available_service_tiers_operations import AvailableServiceTiersOperations -from ._gateways_operations import GatewaysOperations -from ._schema_operations import SchemaOperations -from ._workspace_purge_operations import WorkspacePurgeOperations -from ._clusters_operations import ClustersOperations -from ._operations import Operations -from ._workspaces_operations import WorkspacesOperations -from ._deleted_workspaces_operations import DeletedWorkspacesOperations -from ._tables_operations import TablesOperations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._available_service_tiers_operations import AvailableServiceTiersOperations # type: ignore +from ._clusters_operations import ClustersOperations # type: ignore +from ._data_exports_operations import DataExportsOperations # type: ignore +from ._data_sources_operations import DataSourcesOperations # type: ignore +from ._gateways_operations import GatewaysOperations # type: 
ignore +from ._intelligence_packs_operations import IntelligencePacksOperations # type: ignore +from ._linked_services_operations import LinkedServicesOperations # type: ignore +from ._linked_storage_accounts_operations import LinkedStorageAccountsOperations # type: ignore +from ._management_groups_operations import ManagementGroupsOperations # type: ignore +from ._operations import Operations # type: ignore +from ._operation_statuses_operations import OperationStatusesOperations # type: ignore +from ._queries_operations import QueriesOperations # type: ignore +from ._query_packs_operations import QueryPacksOperations # type: ignore +from ._saved_searches_operations import SavedSearchesOperations # type: ignore +from ._schema_operations import SchemaOperations # type: ignore +from ._shared_keys_operations import SharedKeysOperations # type: ignore +from ._storage_insight_configs_operations import StorageInsightConfigsOperations # type: ignore +from ._tables_operations import TablesOperations # type: ignore +from ._usages_operations import UsagesOperations # type: ignore +from ._workspace_purge_operations import WorkspacePurgeOperations # type: ignore +from ._workspaces_operations import WorkspacesOperations # type: ignore +from ._deleted_workspaces_operations import DeletedWorkspacesOperations # type: ignore +from ._summary_logs_operations import SummaryLogsOperations # type: ignore from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ - "QueryPacksOperations", - "QueriesOperations", + "AvailableServiceTiersOperations", + "ClustersOperations", "DataExportsOperations", "DataSourcesOperations", + "GatewaysOperations", "IntelligencePacksOperations", "LinkedServicesOperations", "LinkedStorageAccountsOperations", "ManagementGroupsOperations", + "Operations", "OperationStatusesOperations", - "SharedKeysOperations", - "UsagesOperations", - 
"StorageInsightConfigsOperations", + "QueriesOperations", + "QueryPacksOperations", "SavedSearchesOperations", - "AvailableServiceTiersOperations", - "GatewaysOperations", "SchemaOperations", + "SharedKeysOperations", + "StorageInsightConfigsOperations", + "TablesOperations", + "UsagesOperations", "WorkspacePurgeOperations", - "ClustersOperations", - "Operations", "WorkspacesOperations", "DeletedWorkspacesOperations", - "TablesOperations", + "SummaryLogsOperations", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_available_service_tiers_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_available_service_tiers_operations.py index 92aec8106a28..684c2899e038 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_available_service_tiers_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_available_service_tiers_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, List, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +24,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._available_service_tiers_operations import build_list_by_workspace_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class AvailableServiceTiersOperations: @@ -48,10 +47,12 @@ class AvailableServiceTiersOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if 
input_args else kwargs.pop("deserializer") @distributed_trace_async async def list_by_workspace( @@ -68,7 +69,7 @@ async def list_by_workspace( :rtype: list[~azure.mgmt.loganalytics.models.AvailableServiceTier] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -79,7 +80,7 @@ async def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[List[_models.AvailableServiceTier]] = kwargs.pop("cls", None) _request = build_list_by_workspace_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_clusters_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_clusters_operations.py index bcb536061b23..5a4a0df4d404 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_clusters_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_clusters_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,6 +32,7 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._clusters_operations import ( build_create_or_update_request, build_delete_request, @@ -39,13 +41,11 @@ build_list_request, build_update_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class ClustersOperations: @@ -62,13 +62,15 @@ class ClustersOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: 
LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Cluster"]: """Gets Log Analytics clusters in a resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -81,10 +83,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -105,7 +107,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = 
self._client.format_url(_request.url) _request.method = "GET" return _request @@ -128,7 +141,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -136,7 +152,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace - def list(self, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Cluster"]: """Gets the Log Analytics clusters in a subscription. :return: An iterator like instance of either Cluster or the result of cls(response) @@ -146,10 +162,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -169,7 +185,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -192,7 +219,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -202,7 +232,7 @@ async def get_next(next_link=None): async def _create_or_update_initial( self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO[bytes]], **kwargs: Any ) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -213,7 +243,7 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) @@ -246,19 +276,29 @@ async def _create_or_update_initial( response = pipeline_response.http_response - if response.status_code not in [200, 201, 202]: + if response.status_code not in [200, 202]: try: await response.read() # Load 
the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore @@ -342,7 +382,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -387,7 +427,7 @@ def get_long_running_output(pipeline_response): ) async def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -398,7 +438,7 @@ async def 
_delete_initial(self, resource_group_name: str, cluster_name: str, **k _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( @@ -425,7 +465,10 @@ async def _delete_initial(self, resource_group_name: str, cluster_name: str, **k except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -451,7 +494,7 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -501,7 +544,7 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) :rtype: ~azure.mgmt.loganalytics.models.Cluster :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError, @@ -512,7 +555,7 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) _request = build_get_request( @@ -534,7 +577,10 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("Cluster", pipeline_response.http_response) @@ -551,7 +597,7 @@ async def _update_initial( parameters: Union[_models.ClusterPatch, IO[bytes]], **kwargs: Any ) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -562,7 +608,7 @@ async def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) @@ -595,19 +641,29 @@ async def _update_initial( 
response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: try: await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore @@ -693,7 +749,7 @@ async def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_exports_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_exports_operations.py index 
12188cb7a4b2..505437306f07 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_exports_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_exports_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -27,19 +28,18 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._data_exports_operations import ( build_create_or_update_request, build_delete_request, build_get_request, build_list_by_workspace_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class DataExportsOperations: @@ -56,15 +56,17 @@ class DataExportsOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> AsyncIterable["_models.DataExport"]: + ) -> AsyncItemPaged["_models.DataExport"]: """Lists the data export instances within a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -79,10 +81,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataExportListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -104,7 +106,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -127,7 +140,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -219,7 +235,7 @@ async def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, 
Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -230,7 +246,7 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataExport] = kwargs.pop("cls", None) @@ -265,7 +281,10 @@ async def create_or_update( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("DataExport", pipeline_response.http_response) @@ -292,7 +311,7 @@ async def get( :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -303,7 +322,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataExport] = kwargs.pop("cls", None) _request = build_get_request( @@ -326,7 +345,10 @@ async def get( if response.status_code 
not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("DataExport", pipeline_response.http_response) @@ -337,9 +359,7 @@ async def get( return deserialized # type: ignore @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, workspace_name: str, data_export_name: str, **kwargs: Any - ) -> None: + async def delete(self, resource_group_name: str, workspace_name: str, data_export_name: str, **kwargs: Any) -> None: """Deletes the specified data export in a given workspace.. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -353,7 +373,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -364,7 +384,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -387,7 +407,10 @@ async def delete( # pylint: disable=inconsistent-return-statements if response.status_code not in [200, 404]: map_error(status_code=response.status_code, response=response, 
error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_sources_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_sources_operations.py index 836f3487f420..d69e48ede5e5 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_sources_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_data_sources_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -27,19 +28,18 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._data_sources_operations import ( build_create_or_update_request, build_delete_request, build_get_request, build_list_by_workspace_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class DataSourcesOperations: @@ -56,10 +56,12 @@ class DataSourcesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload async def create_or_update( @@ -146,7 +148,7 @@ async def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -157,7 
+159,7 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataSource] = kwargs.pop("cls", None) @@ -202,9 +204,7 @@ async def create_or_update( return deserialized # type: ignore @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, workspace_name: str, data_source_name: str, **kwargs: Any - ) -> None: + async def delete(self, resource_group_name: str, workspace_name: str, data_source_name: str, **kwargs: Any) -> None: """Deletes a data source instance. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -218,7 +218,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -229,7 +229,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -274,7 +274,7 @@ async def get( :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -285,7 +285,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataSource] = kwargs.pop("cls", None) _request = build_get_request( @@ -320,7 +320,7 @@ async def get( @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, filter: str, skiptoken: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.DataSource"]: + ) -> AsyncItemPaged["_models.DataSource"]: """Gets the first page of data source instances in a workspace with the link to the next page. :param resource_group_name: The name of the resource group. 
The name is case insensitive. @@ -340,10 +340,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataSourceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -367,7 +367,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_deleted_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_deleted_workspaces_operations.py index 483f6f97587a..7ad97edeb858 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_deleted_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_deleted_workspaces_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -25,14 +26,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._deleted_workspaces_operations import build_list_by_resource_group_request, build_list_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class DeletedWorkspacesOperations: @@ -49,13 +49,15 @@ class DeletedWorkspacesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + 
self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Workspace"]: """Gets recently deleted workspaces in a subscription, available for recovery. :return: An iterator like instance of either Workspace or the result of cls(response) @@ -65,10 +67,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -88,7 +90,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -111,7 +124,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -119,7 +135,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace - def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Workspace"]: """Gets recently deleted workspaces in a resource group, available for recovery. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -132,10 +148,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -156,7 +172,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -179,7 +206,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_gateways_operations.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_gateways_operations.py index 67f55f918558..3c6d467adf8c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_gateways_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_gateways_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +24,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._gateways_operations import build_delete_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class GatewaysOperations: @@ -48,15 +47,15 @@ class GatewaysOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, workspace_name: str, gateway_id: str, **kwargs: Any - ) -> None: + async def delete(self, resource_group_name: str, workspace_name: str, gateway_id: str, **kwargs: Any) -> None: """Delete a Log Analytics gateway. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -70,7 +69,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -81,7 +80,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_intelligence_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_intelligence_packs_operations.py index adfc3b321813..058eae2203e7 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_intelligence_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_intelligence_packs_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, List, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +24,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._intelligence_packs_operations import build_disable_request, build_enable_request, build_list_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class IntelligencePacksOperations: @@ -48,13 +47,15 @@ class IntelligencePacksOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = 
input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def disable( # pylint: disable=inconsistent-return-statements + async def disable( self, resource_group_name: str, workspace_name: str, intelligence_pack_name: str, **kwargs: Any ) -> None: """Disables an intelligence pack for a given workspace. @@ -70,7 +71,7 @@ async def disable( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -81,7 +82,7 @@ async def disable( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_disable_request( @@ -110,7 +111,7 @@ async def disable( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, {}) # type: ignore @distributed_trace_async - async def enable( # pylint: disable=inconsistent-return-statements + async def enable( self, resource_group_name: str, workspace_name: str, intelligence_pack_name: str, **kwargs: Any ) -> None: """Enables an intelligence pack for a given workspace. 
@@ -126,7 +127,7 @@ async def enable( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -137,7 +138,7 @@ async def enable( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_enable_request( @@ -181,7 +182,7 @@ async def list( :rtype: list[~azure.mgmt.loganalytics.models.IntelligencePack] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -192,7 +193,7 @@ async def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[List[_models.IntelligencePack]] = kwargs.pop("cls", None) _request = build_list_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_services_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_services_operations.py index fb464b3a9f0e..aeb82711f1f9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_services_operations.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_services_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,19 +32,18 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._linked_services_operations import ( build_create_or_update_request, build_delete_request, build_get_request, build_list_by_workspace_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class LinkedServicesOperations: @@ -60,10 +60,12 @@ class LinkedServicesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") async def _create_or_update_initial( self, @@ -73,7 +75,7 @@ async def _create_or_update_initial( parameters: Union[_models.LinkedService, IO[bytes]], **kwargs: Any ) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ 
-84,7 +86,7 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) @@ -224,7 +226,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -272,7 +274,7 @@ def get_long_running_output(pipeline_response): async def _delete_initial( self, resource_group_name: str, workspace_name: str, linked_service_name: str, **kwargs: Any ) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -283,7 +285,7 @@ async def _delete_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( @@ -341,7 +343,7 @@ async def 
begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -400,7 +402,7 @@ async def get( :rtype: ~azure.mgmt.loganalytics.models.LinkedService :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -411,7 +413,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) _request = build_get_request( @@ -446,7 +448,7 @@ async def get( @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> AsyncIterable["_models.LinkedService"]: + ) -> AsyncItemPaged["_models.LinkedService"]: """Gets the linked services instances in a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -461,10 +463,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedServiceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -486,7 +488,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_storage_accounts_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_storage_accounts_operations.py index 528e9c15a8cc..6682608d8564 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_storage_accounts_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_linked_storage_accounts_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -27,19 +28,18 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._linked_storage_accounts_operations import ( build_create_or_update_request, build_delete_request, build_get_request, build_list_by_workspace_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class LinkedStorageAccountsOperations: @@ -56,10 +56,12 @@ class LinkedStorageAccountsOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) 
if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload async def create_or_update( @@ -154,7 +156,7 @@ async def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -165,7 +167,7 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedStorageAccountsResource] = kwargs.pop("cls", None) @@ -210,7 +212,7 @@ async def create_or_update( return deserialized # type: ignore @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements + async def delete( self, resource_group_name: str, workspace_name: str, @@ -232,7 +234,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -243,7 +245,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -294,7 +296,7 @@ async def get( :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -305,7 +307,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedStorageAccountsResource] = kwargs.pop("cls", None) _request = build_get_request( @@ -340,7 +342,7 @@ async def get( @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> AsyncIterable["_models.LinkedStorageAccountsResource"]: + ) -> AsyncItemPaged["_models.LinkedStorageAccountsResource"]: """Gets all linked storage accounts associated with the specified workspace, storage accounts will be sorted by their data source type. 
@@ -358,10 +360,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedStorageAccountsListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -383,7 +385,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_management_groups_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_management_groups_operations.py index f182f16eb10e..345dd65a077b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_management_groups_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_management_groups_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -25,14 +26,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._management_groups_operations import build_list_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class ManagementGroupsOperations: @@ -49,15 +49,17 @@ class ManagementGroupsOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = 
input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> AsyncIterable["_models.ManagementGroup"]: + ) -> AsyncItemPaged["_models.ManagementGroup"]: """Gets a list of management groups connected to a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -73,10 +75,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListManagementGroupsResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -98,7 +100,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = 
self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operation_statuses_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operation_statuses_operations.py index b19817fea735..982a1f179576 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operation_statuses_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operation_statuses_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +24,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._operation_statuses_operations import build_get_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class OperationStatusesOperations: @@ -48,10 +47,12 @@ class OperationStatusesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _models.OperationStatus: @@ -65,7 +66,7 @@ async def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _m :rtype: ~azure.mgmt.loganalytics.models.OperationStatus :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -76,7 +77,7 @@ async def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _m _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationStatus] = kwargs.pop("cls", None) _request = build_get_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operations.py index 18f251fe07b9..fd389551c813 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -25,14 +26,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._operations import build_list_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class Operations: @@ -49,13 +49,15 @@ class Operations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Operation"]: """Lists all of the available OperationalInsights Rest API operations. 
:return: An iterator like instance of either Operation or the result of cls(response) @@ -65,10 +67,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -87,7 +89,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -110,7 +123,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git 
a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_queries_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_queries_operations.py index ee4c05d9271e..06dab05ba604 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_queries_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_queries_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -27,6 +28,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._queries_operations import ( build_delete_request, build_get_request, @@ -35,13 +37,11 @@ build_search_request, build_update_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class QueriesOperations: @@ -58,10 +58,12 @@ class QueriesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -72,7 +74,7 @@ def list( include_body: Optional[bool] = None, skip_token: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.LogAnalyticsQueryPackQuery"]: + ) -> AsyncItemPaged["_models.LogAnalyticsQueryPackQuery"]: """Gets a list of Queries defined within a Log Analytics QueryPack. :param resource_group_name: The name of the resource group. 
The name is case insensitive. @@ -97,10 +99,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPackQueryListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -125,7 +127,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -148,7 +161,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -167,7 +183,7 @@ def search( *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncIterable["_models.LogAnalyticsQueryPackQuery"]: + ) -> 
AsyncItemPaged["_models.LogAnalyticsQueryPackQuery"]: """Search a list of Queries defined within a Log Analytics QueryPack according to given search properties. @@ -210,7 +226,7 @@ def search( *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncIterable["_models.LogAnalyticsQueryPackQuery"]: + ) -> AsyncItemPaged["_models.LogAnalyticsQueryPackQuery"]: """Search a list of Queries defined within a Log Analytics QueryPack according to given search properties. @@ -250,7 +266,7 @@ def search( include_body: Optional[bool] = None, skip_token: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.LogAnalyticsQueryPackQuery"]: + ) -> AsyncItemPaged["_models.LogAnalyticsQueryPackQuery"]: """Search a list of Queries defined within a Log Analytics QueryPack according to given search properties. @@ -281,11 +297,11 @@ def search( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQueryListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -320,7 +336,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -343,7 +370,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -367,7 +397,7 @@ async def get( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -378,7 +408,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) _request = build_get_request( @@ -401,7 +431,10 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = 
self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) @@ -499,7 +532,7 @@ async def put( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -510,7 +543,7 @@ async def put( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) @@ -545,7 +578,10 @@ async def put( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) @@ -643,7 +679,7 @@ async def update( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -654,7 +690,7 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) @@ -689,7 +725,10 @@ async def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) @@ -700,9 +739,7 @@ async def update( return deserialized # type: ignore @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, query_pack_name: str, id: str, **kwargs: Any - ) -> None: + async def delete(self, resource_group_name: str, query_pack_name: str, id: str, **kwargs: Any) -> None: """Deletes a specific Query defined within an Log Analytics QueryPack. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -716,7 +753,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -727,7 +764,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -750,7 +787,10 @@ async def delete( # pylint: disable=inconsistent-return-statements if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_query_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_query_packs_operations.py index 67fa93e86b11..063a092d6262 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_query_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_query_packs_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -27,6 +28,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._query_packs_operations import ( build_create_or_update_request, build_create_or_update_without_name_request, @@ -36,13 +38,11 @@ build_list_request, build_update_tags_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class QueryPacksOperations: @@ -59,13 +59,15 @@ class QueryPacksOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else 
kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> AsyncIterable["_models.LogAnalyticsQueryPack"]: + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.LogAnalyticsQueryPack"]: """Gets a list of all Log Analytics QueryPacks within a subscription. :return: An iterator like instance of either LogAnalyticsQueryPack or the result of @@ -77,10 +79,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.LogAnalyticsQueryPack"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPackListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -100,7 +102,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -123,7 +136,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -133,7 +149,7 @@ async def get_next(next_link=None): @distributed_trace def list_by_resource_group( self, resource_group_name: str, **kwargs: Any - ) -> AsyncIterable["_models.LogAnalyticsQueryPack"]: + ) -> AsyncItemPaged["_models.LogAnalyticsQueryPack"]: """Gets a list of Log Analytics QueryPacks within a resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -148,10 +164,10 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPackListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -172,7 +188,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -195,7 +222,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -276,7 +306,7 @@ async def create_or_update_without_name( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ 
- error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -287,7 +317,7 @@ async def create_or_update_without_name( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) @@ -320,7 +350,10 @@ async def create_or_update_without_name( if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) @@ -331,9 +364,7 @@ async def create_or_update_without_name( return deserialized # type: ignore @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, query_pack_name: str, **kwargs: Any - ) -> None: + async def delete(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> None: """Deletes a Log Analytics QueryPack. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -345,7 +376,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -356,7 +387,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -378,7 +409,10 @@ async def delete( # pylint: disable=inconsistent-return-statements if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -397,7 +431,7 @@ async def get(self, resource_group_name: str, query_pack_name: str, **kwargs: An :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -408,7 +442,7 @@ async def get(self, resource_group_name: str, query_pack_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) _request = build_get_request( @@ -430,7 +464,10 @@ async def get(self, resource_group_name: str, query_pack_name: str, **kwargs: An if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) @@ -523,7 +560,7 @@ async def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -534,7 +571,7 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) @@ -568,7 +605,10 @@ async def create_or_update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + 
_models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) @@ -654,7 +694,7 @@ async def update_tags( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -665,7 +705,7 @@ async def update_tags( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) @@ -699,7 +739,10 @@ async def update_tags( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_saved_searches_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_saved_searches_operations.py index 1bbf27a480ba..f8d8c7b91e84 100644 --- 
a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_saved_searches_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_saved_searches_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -25,19 +25,18 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._saved_searches_operations import ( build_create_or_update_request, build_delete_request, build_get_request, build_list_by_workspace_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class SavedSearchesOperations: @@ -54,15 +53,15 @@ class SavedSearchesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, workspace_name: str, saved_search_id: str, **kwargs: Any - ) -> None: + async def delete(self, resource_group_name: str, workspace_name: str, saved_search_id: str, **kwargs: Any) -> None: """Deletes the specified saved search in a given workspace. 
:param resource_group_name: The name of the resource group. The name is case insensitive. @@ -76,7 +75,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -87,7 +86,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -200,7 +199,7 @@ async def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -211,7 +210,7 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SavedSearch] = kwargs.pop("cls", None) @@ -272,7 +271,7 @@ async def get( :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -283,7 +282,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SavedSearch] = kwargs.pop("cls", None) _request = build_get_request( @@ -330,7 +329,7 @@ async def list_by_workspace( :rtype: ~azure.mgmt.loganalytics.models.SavedSearchesListResult :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -341,7 +340,7 @@ async def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SavedSearchesListResult] = kwargs.pop("cls", None) _request = build_list_by_workspace_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_schema_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_schema_operations.py index fb7220cd78cf..4120ef1a0f2b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_schema_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_schema_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights reserved. @@ -6,9 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +24,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._schema_operations import build_get_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class SchemaOperations: @@ -48,10 +47,12 @@ class SchemaOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = 
input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get( @@ -68,7 +69,7 @@ async def get( :rtype: ~azure.mgmt.loganalytics.models.SearchGetSchemaResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -79,7 +80,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SearchGetSchemaResponse] = kwargs.pop("cls", None) _request = build_get_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_shared_keys_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_shared_keys_operations.py index 67c955511b62..2a133a9753ad 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_shared_keys_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_shared_keys_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +24,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._shared_keys_operations import build_get_shared_keys_request, build_regenerate_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class SharedKeysOperations: @@ -48,10 +47,12 @@ class SharedKeysOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else 
kwargs.pop("deserializer") @distributed_trace_async async def get_shared_keys(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.SharedKeys: @@ -66,7 +67,7 @@ async def get_shared_keys(self, resource_group_name: str, workspace_name: str, * :rtype: ~azure.mgmt.loganalytics.models.SharedKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -77,7 +78,7 @@ async def get_shared_keys(self, resource_group_name: str, workspace_name: str, * _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SharedKeys] = kwargs.pop("cls", None) _request = build_get_shared_keys_request( @@ -122,7 +123,7 @@ async def regenerate(self, resource_group_name: str, workspace_name: str, **kwar :rtype: ~azure.mgmt.loganalytics.models.SharedKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -133,7 +134,7 @@ async def regenerate(self, resource_group_name: str, workspace_name: str, **kwar _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SharedKeys] = kwargs.pop("cls", None) _request = build_regenerate_request( diff --git 
a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_storage_insight_configs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_storage_insight_configs_operations.py index 7a50bfcce3d3..ed3a826b4bb9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_storage_insight_configs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_storage_insight_configs_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -27,19 +28,18 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._storage_insight_configs_operations import ( build_create_or_update_request, build_delete_request, build_get_request, build_list_by_workspace_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class StorageInsightConfigsOperations: @@ -56,10 +56,12 @@ class StorageInsightConfigsOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload async def create_or_update( @@ -146,7 +148,7 @@ async def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
ResourceExistsError, @@ -157,7 +159,7 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.StorageInsight] = kwargs.pop("cls", None) @@ -218,7 +220,7 @@ async def get( :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -229,7 +231,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StorageInsight] = kwargs.pop("cls", None) _request = build_get_request( @@ -262,7 +264,7 @@ async def get( return deserialized # type: ignore @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements + async def delete( self, resource_group_name: str, workspace_name: str, storage_insight_name: str, **kwargs: Any ) -> None: """Deletes a storageInsightsConfigs resource. 
@@ -278,7 +280,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -289,7 +291,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -320,7 +322,7 @@ async def delete( # pylint: disable=inconsistent-return-statements @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> AsyncIterable["_models.StorageInsight"]: + ) -> AsyncItemPaged["_models.StorageInsight"]: """Lists the storage insight instances within a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -335,10 +337,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StorageInsightListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -360,7 +362,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_summary_logs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_summary_logs_operations.py new file mode 100644 index 000000000000..028c0d410a81 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_summary_logs_operations.py @@ -0,0 +1,959 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import AsyncPipelineClient +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer +from ...operations._summary_logs_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_workspace_request, + build_retry_bin_request, + build_start_request, + build_stop_request, +) +from .._configuration import LogAnalyticsManagementClientConfiguration + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list + + +class SummaryLogsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.loganalytics.aio.LogAnalyticsManagementClient`'s + :attr:`summary_logs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.SummaryLogs"]: + """Gets all summary rules for the specified Log Analytics workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of either SummaryLogs or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.SummaryLogs] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SummaryLogsListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("SummaryLogsListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, 
AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: Union[_models.SummaryLogs, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "SummaryLogs") + + _request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + 
api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: _models.SummaryLogs, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.SummaryLogs]: + """Creates or updates Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. 
+ :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to update summary rules properties. Required. + :type parameters: ~azure.mgmt.loganalytics.models.SummaryLogs + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either SummaryLogs or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.SummaryLogs] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.SummaryLogs]: + """Creates or updates Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to update summary rules properties. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either SummaryLogs or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.SummaryLogs] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: Union[_models.SummaryLogs, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.SummaryLogs]: + """Creates or updates Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to update summary rules properties. Is either a + SummaryLogs type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.SummaryLogs or IO[bytes] + :return: An instance of AsyncLROPoller that returns either SummaryLogs or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.loganalytics.models.SummaryLogs] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.SummaryLogs] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = self._deserialize("SummaryLogs", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + 
AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.SummaryLogs].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.SummaryLogs]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> _models.SummaryLogs: + """Gets Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. 
+ :type summary_logs_name: str + :return: SummaryLogs or the result of cls(response) + :rtype: ~azure.mgmt.loganalytics.models.SummaryLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SummaryLogs] = kwargs.pop("cls", None) + + _request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SummaryLogs", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _delete_initial( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: 
ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + 
@distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs), + ) + 
elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + async def _start_initial( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_start_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + 
_models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_start( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Starts an inactive Summary rule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. 
+ :type summary_logs_name: str + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._start_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace_async + async def stop(self, resource_group_name: str, workspace_name: 
str, summary_logs_name: str, **kwargs: Any) -> None: + """Stops an active Summary rule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_stop_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + async def 
_retry_bin_initial( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: Union[_models.SummaryLogsRetryBin, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "SummaryLogsRetryBin") + + _request = build_retry_bin_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_retry_bin( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: _models.SummaryLogsRetryBin, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Retries a failed Summary rule bin. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to retry a Summary rule bin. Required. + :type parameters: ~azure.mgmt.loganalytics.models.SummaryLogsRetryBin + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_retry_bin( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Retries a failed Summary rule bin. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to retry a Summary rule bin. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_retry_bin( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: Union[_models.SummaryLogsRetryBin, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Retries a failed Summary rule bin. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. 
+ :type summary_logs_name: str + :param parameters: The parameters required to retry a Summary rule bin. Is either a + SummaryLogsRetryBin type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.loganalytics.models.SummaryLogsRetryBin or IO[bytes] + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._retry_bin_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + 
return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py index 1189e01c9d95..81b560102861 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_tables_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,6 +32,7 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._tables_operations import ( build_cancel_search_request, build_create_or_update_request, @@ -40,13 +42,11 @@ build_migrate_request, build_update_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class TablesOperations: @@ -63,15 +63,17 @@ class TablesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> AsyncIterable["_models.Table"]: + ) -> AsyncItemPaged["_models.Table"]: """Gets all the tables for the specified Log Analytics workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -86,10 +88,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TablesListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -111,7 +113,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -134,7 +147,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -149,7 +165,7 @@ async def _create_or_update_initial( parameters: Union[_models.Table, IO[bytes]], **kwargs: Any ) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, 
Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -160,7 +176,7 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) @@ -200,7 +216,10 @@ async def _create_or_update_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -298,7 +317,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -354,7 +373,7 @@ async def _update_initial( parameters: Union[_models.Table, IO[bytes]], **kwargs: Any ) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, 
Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -365,7 +384,7 @@ async def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) @@ -405,7 +424,10 @@ async def _update_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -503,7 +525,7 @@ async def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -566,7 +588,7 @@ async def get(self, resource_group_name: str, workspace_name: str, table_name: s :rtype: ~azure.mgmt.loganalytics.models.Table :raises ~azure.core.exceptions.HttpResponseError: 
""" - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -577,7 +599,7 @@ async def get(self, resource_group_name: str, workspace_name: str, table_name: s _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) _request = build_get_request( @@ -600,7 +622,10 @@ async def get(self, resource_group_name: str, workspace_name: str, table_name: s if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("Table", pipeline_response.http_response) @@ -613,7 +638,7 @@ async def get(self, resource_group_name: str, workspace_name: str, table_name: s async def _delete_initial( self, resource_group_name: str, workspace_name: str, table_name: str, **kwargs: Any ) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -624,7 +649,7 @@ async def _delete_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( @@ -652,13 +677,23 @@ async def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore @@ -682,7 +717,7 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -724,9 +759,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace_async - async def migrate( # pylint: disable=inconsistent-return-statements - self, resource_group_name: 
str, workspace_name: str, table_name: str, **kwargs: Any - ) -> None: + async def migrate(self, resource_group_name: str, workspace_name: str, table_name: str, **kwargs: Any) -> None: """Migrate a Log Analytics table from support of the Data Collector API and Custom Fields features to support of Data Collection Rule-based Custom Logs. @@ -741,7 +774,7 @@ async def migrate( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -752,7 +785,7 @@ async def migrate( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_migrate_request( @@ -775,14 +808,17 @@ async def migrate( # pylint: disable=inconsistent-return-statements if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) # type: ignore @distributed_trace_async - async def cancel_search( # pylint: disable=inconsistent-return-statements + async def cancel_search( self, resource_group_name: str, workspace_name: str, table_name: str, **kwargs: Any ) -> None: """Cancel a log analytics workspace search results table query run. 
@@ -798,7 +834,7 @@ async def cancel_search( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -809,7 +845,7 @@ async def cancel_search( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_cancel_search_request( @@ -832,7 +868,10 @@ async def cancel_search( # pylint: disable=inconsistent-return-statements if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_usages_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_usages_operations.py index 380bde0cefff..e606c94ac39a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_usages_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_usages_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights reserved. @@ -6,9 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -25,14 +26,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._usages_operations import build_list_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class UsagesOperations: @@ -49,15 +49,17 @@ class UsagesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if 
input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> AsyncIterable["_models.UsageMetric"]: + ) -> AsyncItemPaged["_models.UsageMetric"]: """Gets a list of usage metrics for a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -72,10 +74,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListUsagesResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -97,7 +99,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspace_purge_operations.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspace_purge_operations.py index f988193e51e6..5ba93de388d9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspace_purge_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspace_purge_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -25,14 +25,13 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._workspace_purge_operations import build_get_purge_status_request, build_purge_request +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class WorkspacePurgeOperations: @@ -49,10 +48,12 @@ class WorkspacePurgeOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload async def purge( @@ -159,7 +160,7 @@ async def purge( :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -170,7 +171,7 @@ async def purge( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.WorkspacePurgeResponse] = kwargs.pop("cls", None) @@ -236,7 +237,7 @@ async def get_purge_status( :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeStatusResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -247,7 +248,7 @@ async def get_purge_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspacePurgeStatusResponse] = kwargs.pop("cls", None) _request = build_get_purge_status_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspaces_operations.py index 3dd7bac43089..dbc1e02206e6 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/aio/operations/_workspaces_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) 
Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,21 +33,25 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models +from ..._utils.serialization import Deserializer, Serializer from ...operations._workspaces_operations import ( build_create_or_update_request, build_delete_request, + build_failback_request, + build_failover_request, + build_get_nsp_request, build_get_request, build_list_by_resource_group_request, + build_list_nsp_request, build_list_request, + build_reconcile_nsp_request, build_update_request, ) +from .._configuration import LogAnalyticsManagementClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list class WorkspacesOperations: @@ -62,13 +68,15 @@ class WorkspacesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - 
self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Workspace"]: """Gets the workspaces in a subscription. :return: An iterator like instance of either Workspace or the result of cls(response) @@ -78,10 +86,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -101,7 +109,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -124,7 +143,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -132,7 +154,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace - def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.Workspace"]: """Gets workspaces in a resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -145,10 +167,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -169,7 +191,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -192,7 +225,10 @@ async def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -206,7 +242,7 @@ async def _create_or_update_initial( parameters: Union[_models.Workspace, IO[bytes]], **kwargs: Any ) -> 
AsyncIterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -217,7 +253,7 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) @@ -256,7 +292,10 @@ async def _create_or_update_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -348,7 +387,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -395,7 +434,7 @@ def get_long_running_output(pipeline_response): async def _delete_initial( self, 
resource_group_name: str, workspace_name: str, force: Optional[bool] = None, **kwargs: Any ) -> AsyncIterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -406,7 +445,7 @@ async def _delete_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( @@ -434,7 +473,10 @@ async def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -468,7 +510,7 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -519,7 +561,7 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises 
~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -530,7 +572,7 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) _request = build_get_request( @@ -552,7 +594,10 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("Workspace", pipeline_response.http_response) @@ -638,7 +683,7 @@ async def update( :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -649,7 +694,7 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] 
= kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) @@ -683,7 +728,10 @@ async def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("Workspace", pipeline_response.http_response) @@ -692,3 +740,529 @@ async def update( return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + + async def _failover_initial( + self, resource_group_name: str, location: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_failover_request( + resource_group_name=resource_group_name, + location=location, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_failover( + self, resource_group_name: str, location: str, workspace_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Activates failover for the specified workspace. + + The specified replication location must match the location of the enabled replication for this + workspace. The failover operation is asynchronous and can take up to 30 minutes to complete. + The status of the operation can be checked using the operationId returned in the response. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param location: The name of the Azure region. Required. + :type location: str + :param workspace_name: The name of the workspace. Required. 
+ :type workspace_name: str + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._failover_initial( + resource_group_name=resource_group_name, + location=location, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + async def _failback_initial( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: 
ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_failback_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_failback( + self, 
resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deactivates failover for the specified workspace. + + The failback operation is asynchronous and can take up to 30 minutes to complete. The status of + the operation can be checked using the operationId returned in the response. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._failback_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if 
cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_nsp( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.NetworkSecurityPerimeterConfiguration"]: + """Gets a list of NSP configurations for specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :return: An iterator like instance of either NetworkSecurityPerimeterConfiguration or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfiguration] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.NetworkSecurityPerimeterConfigurationListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_nsp_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to 
next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("NetworkSecurityPerimeterConfigurationListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_nsp( + self, + resource_group_name: str, + workspace_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> _models.NetworkSecurityPerimeterConfiguration: + """Gets a network security perimeter configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param network_security_perimeter_configuration_name: The name for a network security perimeter + configuration. Required. + :type network_security_perimeter_configuration_name: str + :return: NetworkSecurityPerimeterConfiguration or the result of cls(response) + :rtype: ~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.NetworkSecurityPerimeterConfiguration] = kwargs.pop("cls", None) + + _request = build_get_nsp_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = 
self._deserialize("NetworkSecurityPerimeterConfiguration", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _reconcile_nsp_initial( + self, + resource_group_name: str, + workspace_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_reconcile_nsp_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) 
+ + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_reconcile_nsp( + self, + resource_group_name: str, + workspace_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Reconcile network security perimeter configuration for Workspace resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param network_security_perimeter_configuration_name: The name for a network security perimeter + configuration. Required. + :type network_security_perimeter_configuration_name: str + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._reconcile_nsp_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + 
api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py index 7fedf4cf528b..74d4613aa1eb 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/__init__.py @@ -5,130 +5,177 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._models_py3 import AssociatedWorkspace -from ._models_py3 import AvailableServiceTier -from ._models_py3 import AzureEntityResource -from ._models_py3 import AzureResourceProperties -from ._models_py3 import CapacityReservationProperties -from ._models_py3 import Cluster -from ._models_py3 import ClusterListResult -from ._models_py3 import ClusterPatch -from ._models_py3 import ClusterSku -from ._models_py3 import Column -from ._models_py3 import CoreSummary -from ._models_py3 import DataExport -from ._models_py3 import DataExportListResult -from ._models_py3 import DataSource -from ._models_py3 import DataSourceFilter -from ._models_py3 import DataSourceListResult -from ._models_py3 import ErrorAdditionalInfo -from ._models_py3 import ErrorDetail -from ._models_py3 import ErrorResponse -from ._models_py3 import Identity -from ._models_py3 import IntelligencePack -from ._models_py3 import KeyVaultProperties -from ._models_py3 import LinkedService -from ._models_py3 import LinkedServiceListResult -from ._models_py3 import LinkedStorageAccountsListResult -from ._models_py3 import LinkedStorageAccountsResource -from ._models_py3 import LogAnalyticsQueryPack -from ._models_py3 import LogAnalyticsQueryPackListResult -from ._models_py3 import LogAnalyticsQueryPackQuery -from ._models_py3 import LogAnalyticsQueryPackQueryListResult -from ._models_py3 import LogAnalyticsQueryPackQueryPropertiesRelated -from ._models_py3 import LogAnalyticsQueryPackQuerySearchProperties -from ._models_py3 import LogAnalyticsQueryPackQuerySearchPropertiesRelated -from ._models_py3 import ManagedServiceIdentity -from ._models_py3 import ManagementGroup -from ._models_py3 import MetricName -from ._models_py3 import Operation -from ._models_py3 import OperationDisplay -from ._models_py3 import OperationListResult -from ._models_py3 import OperationStatus -from 
._models_py3 import PrivateLinkScopedResource -from ._models_py3 import ProxyResource -from ._models_py3 import QueryPacksResource -from ._models_py3 import Resource -from ._models_py3 import RestoredLogs -from ._models_py3 import ResultStatistics -from ._models_py3 import SavedSearch -from ._models_py3 import SavedSearchesListResult -from ._models_py3 import Schema -from ._models_py3 import SearchGetSchemaResponse -from ._models_py3 import SearchMetadata -from ._models_py3 import SearchMetadataSchema -from ._models_py3 import SearchResults -from ._models_py3 import SearchSchemaValue -from ._models_py3 import SearchSort -from ._models_py3 import SharedKeys -from ._models_py3 import StorageAccount -from ._models_py3 import StorageInsight -from ._models_py3 import StorageInsightListResult -from ._models_py3 import StorageInsightStatus -from ._models_py3 import SystemData -from ._models_py3 import SystemDataAutoGenerated -from ._models_py3 import Table -from ._models_py3 import TablesListResult -from ._models_py3 import Tag -from ._models_py3 import TagsResource -from ._models_py3 import TrackedResource -from ._models_py3 import UsageMetric -from ._models_py3 import UserAssignedIdentity -from ._models_py3 import UserIdentityProperties -from ._models_py3 import Workspace -from ._models_py3 import WorkspaceCapping -from ._models_py3 import WorkspaceFeatures -from ._models_py3 import WorkspaceListManagementGroupsResult -from ._models_py3 import WorkspaceListResult -from ._models_py3 import WorkspaceListUsagesResult -from ._models_py3 import WorkspacePatch -from ._models_py3 import WorkspacePurgeBody -from ._models_py3 import WorkspacePurgeBodyFilters -from ._models_py3 import WorkspacePurgeResponse -from ._models_py3 import WorkspacePurgeStatusResponse -from ._models_py3 import WorkspaceSku +from typing import TYPE_CHECKING -from ._log_analytics_management_client_enums import BillingType -from ._log_analytics_management_client_enums import Capacity -from 
._log_analytics_management_client_enums import CapacityReservationLevel -from ._log_analytics_management_client_enums import ClusterEntityStatus -from ._log_analytics_management_client_enums import ClusterSkuNameEnum -from ._log_analytics_management_client_enums import ColumnDataTypeHintEnum -from ._log_analytics_management_client_enums import ColumnTypeEnum -from ._log_analytics_management_client_enums import CreatedByType -from ._log_analytics_management_client_enums import DataIngestionStatus -from ._log_analytics_management_client_enums import DataSourceKind -from ._log_analytics_management_client_enums import DataSourceType -from ._log_analytics_management_client_enums import IdentityType -from ._log_analytics_management_client_enums import LinkedServiceEntityStatus -from ._log_analytics_management_client_enums import ManagedServiceIdentityType -from ._log_analytics_management_client_enums import ProvisioningStateEnum -from ._log_analytics_management_client_enums import PublicNetworkAccessType -from ._log_analytics_management_client_enums import PurgeState -from ._log_analytics_management_client_enums import SearchSortEnum -from ._log_analytics_management_client_enums import SkuNameEnum -from ._log_analytics_management_client_enums import SourceEnum -from ._log_analytics_management_client_enums import StorageInsightState -from ._log_analytics_management_client_enums import TablePlanEnum -from ._log_analytics_management_client_enums import TableSubTypeEnum -from ._log_analytics_management_client_enums import TableTypeEnum -from ._log_analytics_management_client_enums import Type -from ._log_analytics_management_client_enums import WorkspaceEntityStatus -from ._log_analytics_management_client_enums import WorkspaceSkuNameEnum +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models_py3 import ( # type: ignore + AccessRule, + AccessRuleProperties, + AccessRulePropertiesSubscriptionsItem, + AssociatedWorkspace, + 
AvailableServiceTier, + AzureEntityResource, + CapacityReservationProperties, + Cluster, + ClusterListResult, + ClusterPatch, + ClusterReplicationProperties, + ClusterSku, + Column, + CoreSummary, + DataExport, + DataExportListResult, + DataSource, + DataSourceFilter, + DataSourceListResult, + ErrorAdditionalInfo, + ErrorDetail, + ErrorDetailAutoGenerated, + ErrorDetailAutoGenerated2, + ErrorResponse, + ErrorResponseAutoGenerated, + ErrorResponseAutoGenerated2, + Identity, + IntelligencePack, + KeyVaultProperties, + LinkedService, + LinkedServiceListResult, + LinkedStorageAccountsListResult, + LinkedStorageAccountsResource, + LogAnalyticsQueryPack, + LogAnalyticsQueryPackListResult, + LogAnalyticsQueryPackQuery, + LogAnalyticsQueryPackQueryListResult, + LogAnalyticsQueryPackQueryPropertiesRelated, + LogAnalyticsQueryPackQuerySearchProperties, + LogAnalyticsQueryPackQuerySearchPropertiesRelated, + ManagedServiceIdentity, + ManagementGroup, + MetricName, + NetworkSecurityPerimeter, + NetworkSecurityPerimeterConfiguration, + NetworkSecurityPerimeterConfigurationListResult, + NetworkSecurityPerimeterConfigurationProperties, + NetworkSecurityProfile, + Operation, + OperationDisplay, + OperationListResult, + OperationStatus, + PrivateLinkScopedResource, + ProvisioningIssue, + ProvisioningIssueProperties, + ProxyResource, + ProxyResourceAutoGenerated, + Resource, + ResourceAssociation, + ResourceAutoGenerated, + RestoredLogs, + ResultStatistics, + RuleDefinition, + SavedSearch, + SavedSearchesListResult, + Schema, + SearchGetSchemaResponse, + SearchMetadata, + SearchMetadataSchema, + SearchResults, + SearchSchemaValue, + SearchSort, + SharedKeys, + StorageAccount, + StorageInsight, + StorageInsightListResult, + StorageInsightStatus, + SummaryLogs, + SummaryLogsListResult, + SummaryLogsRetryBin, + SummaryLogsRetryBinProperties, + SystemData, + Table, + TablesListResult, + Tag, + TagsResource, + TrackedResource, + TrackedResourceAutoGenerated, + UsageMetric, + 
UserAssignedIdentity, + UserIdentityProperties, + Workspace, + WorkspaceCapping, + WorkspaceFailoverProperties, + WorkspaceFeatures, + WorkspaceListManagementGroupsResult, + WorkspaceListResult, + WorkspaceListUsagesResult, + WorkspacePatch, + WorkspacePurgeBody, + WorkspacePurgeBodyFilters, + WorkspacePurgeResponse, + WorkspacePurgeStatusResponse, + WorkspaceReplicationPatProperties, + WorkspaceReplicationProperties, + WorkspaceSku, +) + +from ._log_analytics_management_client_enums import ( # type: ignore + AccessRuleDirection, + BillingType, + ClusterEntityStatus, + ClusterReplicationState, + ClusterSkuNameEnum, + ColumnDataTypeHintEnum, + ColumnTypeEnum, + CreatedByType, + DataIngestionStatus, + DataSourceKind, + DataSourceType, + IdentityType, + IssueType, + LinkedServiceEntityStatus, + ManagedServiceIdentityType, + NetworkSecurityPerimeterConfigurationProvisioningState, + ProvisioningStateEnum, + PublicNetworkAccessType, + PurgeState, + ResourceAssociationAccessMode, + RuleTypeEnum, + SearchSortEnum, + Severity, + SkuNameEnum, + SourceEnum, + StatusCodeEnum, + StorageInsightState, + TablePlanEnum, + TableSubTypeEnum, + TableTypeEnum, + TimeSelectorEnum, + Type, + WorkspaceEntityStatus, + WorkspaceFailoverState, + WorkspaceReplicationState, + WorkspaceSkuNameEnum, +) from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ + "AccessRule", + "AccessRuleProperties", + "AccessRulePropertiesSubscriptionsItem", "AssociatedWorkspace", "AvailableServiceTier", "AzureEntityResource", - "AzureResourceProperties", "CapacityReservationProperties", "Cluster", "ClusterListResult", "ClusterPatch", + "ClusterReplicationProperties", "ClusterSku", "Column", "CoreSummary", @@ -139,7 +186,11 @@ "DataSourceListResult", "ErrorAdditionalInfo", "ErrorDetail", + "ErrorDetailAutoGenerated", + "ErrorDetailAutoGenerated2", "ErrorResponse", + 
"ErrorResponseAutoGenerated", + "ErrorResponseAutoGenerated2", "Identity", "IntelligencePack", "KeyVaultProperties", @@ -157,16 +208,26 @@ "ManagedServiceIdentity", "ManagementGroup", "MetricName", + "NetworkSecurityPerimeter", + "NetworkSecurityPerimeterConfiguration", + "NetworkSecurityPerimeterConfigurationListResult", + "NetworkSecurityPerimeterConfigurationProperties", + "NetworkSecurityProfile", "Operation", "OperationDisplay", "OperationListResult", "OperationStatus", "PrivateLinkScopedResource", + "ProvisioningIssue", + "ProvisioningIssueProperties", "ProxyResource", - "QueryPacksResource", + "ProxyResourceAutoGenerated", "Resource", + "ResourceAssociation", + "ResourceAutoGenerated", "RestoredLogs", "ResultStatistics", + "RuleDefinition", "SavedSearch", "SavedSearchesListResult", "Schema", @@ -181,18 +242,23 @@ "StorageInsight", "StorageInsightListResult", "StorageInsightStatus", + "SummaryLogs", + "SummaryLogsListResult", + "SummaryLogsRetryBin", + "SummaryLogsRetryBinProperties", "SystemData", - "SystemDataAutoGenerated", "Table", "TablesListResult", "Tag", "TagsResource", "TrackedResource", + "TrackedResourceAutoGenerated", "UsageMetric", "UserAssignedIdentity", "UserIdentityProperties", "Workspace", "WorkspaceCapping", + "WorkspaceFailoverProperties", "WorkspaceFeatures", "WorkspaceListManagementGroupsResult", "WorkspaceListResult", @@ -202,11 +268,13 @@ "WorkspacePurgeBodyFilters", "WorkspacePurgeResponse", "WorkspacePurgeStatusResponse", + "WorkspaceReplicationPatProperties", + "WorkspaceReplicationProperties", "WorkspaceSku", + "AccessRuleDirection", "BillingType", - "Capacity", - "CapacityReservationLevel", "ClusterEntityStatus", + "ClusterReplicationState", "ClusterSkuNameEnum", "ColumnDataTypeHintEnum", "ColumnTypeEnum", @@ -215,21 +283,30 @@ "DataSourceKind", "DataSourceType", "IdentityType", + "IssueType", "LinkedServiceEntityStatus", "ManagedServiceIdentityType", + "NetworkSecurityPerimeterConfigurationProvisioningState", 
"ProvisioningStateEnum", "PublicNetworkAccessType", "PurgeState", + "ResourceAssociationAccessMode", + "RuleTypeEnum", "SearchSortEnum", + "Severity", "SkuNameEnum", "SourceEnum", + "StatusCodeEnum", "StorageInsightState", "TablePlanEnum", "TableSubTypeEnum", "TableTypeEnum", + "TimeSelectorEnum", "Type", "WorkspaceEntityStatus", + "WorkspaceFailoverState", + "WorkspaceReplicationState", "WorkspaceSkuNameEnum", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_log_analytics_management_client_enums.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_log_analytics_management_client_enums.py index 612e6e0c9aa8..cd0362a4d76d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_log_analytics_management_client_enums.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_log_analytics_management_client_enums.py @@ -10,6 +10,15 @@ from azure.core import CaseInsensitiveEnumMeta +class AccessRuleDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Direction of Access Rule.""" + + INBOUND = "Inbound" + """Applies to inbound network traffic to the secured resources.""" + OUTBOUND = "Outbound" + """Applies to outbound network traffic from the secured resources""" + + class BillingType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Configures whether billing will be only on the cluster or each workspace will be billed by its proportional use. This does not change the overall billing, only how it will be distributed. 
@@ -20,37 +29,6 @@ class BillingType(str, Enum, metaclass=CaseInsensitiveEnumMeta): WORKSPACES = "Workspaces" -class Capacity(int, Enum, metaclass=CaseInsensitiveEnumMeta): - """The capacity reservation level in Gigabytes for this cluster.""" - - ONE_HUNDRED = 100 - TWO_HUNDRED = 200 - THREE_HUNDRED = 300 - FOUR_HUNDRED = 400 - FIVE_HUNDRED = 500 - TEN_HUNDRED = 1000 - TWO_THOUSAND = 2000 - FIVE_THOUSAND = 5000 - TEN_THOUSAND = 10000 - TWENTY_FIVE_THOUSAND = 25000 - FIFTY_THOUSAND = 50000 - - -class CapacityReservationLevel(int, Enum, metaclass=CaseInsensitiveEnumMeta): - """The capacity reservation level in GB for this workspace, when CapacityReservation sku is - selected. - """ - - ONE_HUNDRED = 100 - TWO_HUNDRED = 200 - THREE_HUNDRED = 300 - FOUR_HUNDRED = 400 - FIVE_HUNDRED = 500 - TEN_HUNDRED = 1000 - TWO_THOUSAND = 2000 - FIVE_THOUSAND = 5000 - - class ClusterEntityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The provisioning state of the cluster.""" @@ -63,6 +41,20 @@ class ClusterEntityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): UPDATING = "Updating" +class ClusterReplicationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The provisioning state of the cluster replication.""" + + SUCCEEDED = "Succeeded" + ENABLE_REQUESTED = "EnableRequested" + ENABLING = "Enabling" + DISABLE_REQUESTED = "DisableRequested" + DISABLING = "Disabling" + ROLLBACK_REQUESTED = "RollbackRequested" + ROLLING_BACK = "RollingBack" + FAILED = "Failed" + CANCELED = "Canceled" + + class ClusterSkuNameEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The SKU (tier) of a cluster.""" @@ -173,17 +165,29 @@ class DataSourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): class IdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of identity that creates/modifies resources.""" + """Type of managed service identity.""" - USER = "user" - APPLICATION = "application" - MANAGED_IDENTITY = "managedIdentity" - KEY = "key" SYSTEM_ASSIGNED = 
"SystemAssigned" USER_ASSIGNED = "UserAssigned" NONE = "None" +class IssueType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of issue.""" + + UNKNOWN = "Unknown" + """Unknown issue type""" + CONFIGURATION_PROPAGATION_FAILURE = "ConfigurationPropagationFailure" + """An error occurred while applying the network security perimeter (NSP) configuration.""" + MISSING_PERIMETER_CONFIGURATION = "MissingPerimeterConfiguration" + """A network connectivity issue is happening on the resource which could be addressed either by + adding new resources to the network security perimeter (NSP) or by modifying access rules.""" + MISSING_IDENTITY_CONFIGURATION = "MissingIdentityConfiguration" + """An managed identity hasn't been associated with the resource. The resource will still be able + to validate inbound traffic from the network security perimeter (NSP) or matching inbound + access rules, but it won't be able to perform outbound access as a member of the NSP.""" + + class LinkedServiceEntityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The provisioning state of the linked service.""" @@ -204,6 +208,20 @@ class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" +class NetworkSecurityPerimeterConfigurationProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of a network security perimeter configuration that is being created or + updated. + """ + + SUCCEEDED = "Succeeded" + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + ACCEPTED = "Accepted" + FAILED = "Failed" + CANCELED = "Canceled" + + class ProvisioningStateEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Table's current provisioning state. If set to 'updating', indicates a resource lock due to ongoing operation, forbidding any update to the table until the ongoing operation is concluded. 
@@ -227,6 +245,9 @@ class PublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enables connectivity to Log Analytics through public DNS.""" DISABLED = "Disabled" """Disables public connectivity to Log Analytics through public DNS.""" + SECURED_BY_PERIMETER = "SecuredByPerimeter" + """Resource is only accessible from private networks and access approved by network security + perimeter associated to this resource.""" class PurgeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -236,6 +257,24 @@ class PurgeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): COMPLETED = "completed" +class ResourceAssociationAccessMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Access mode of the resource association.""" + + ENFORCED = "Enforced" + """Enforced access mode - traffic to the resource that failed access checks is blocked""" + LEARNING = "Learning" + """Learning access mode - traffic to the resource is enabled for analysis but not blocked""" + AUDIT = "Audit" + """Audit access mode - traffic to the resource that fails access checks is logged but not blocked""" + + +class RuleTypeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SummaryRules rule type: User.""" + + USER = "User" + """User defined summary rule. 
This is the definition for rules created and defined by users.""" + + class SearchSortEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The sort order of the search.""" @@ -243,6 +282,13 @@ class SearchSortEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): DESC = "desc" +class Severity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Severity of the issue.""" + + WARNING = "Warning" + ERROR = "Error" + + class SkuNameEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The name of the Service Tier.""" @@ -265,6 +311,15 @@ class SourceEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Tables created by the owner of the Workspace, and only found in this Workspace.""" +class StatusCodeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates the reason for rule deactivation.""" + + USER_ACTION = "UserAction" + """Summary rule stop originated from a user action (Stop was called).""" + DATA_PLANE_ERROR = "DataPlaneError" + """Summary rule stop was caused due to data plane related error.""" + + class StorageInsightState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The state of the storage insight connection to the workspace.""" @@ -276,9 +331,11 @@ class TablePlanEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Instruct the system how to handle and charge the logs ingested to this table.""" BASIC = "Basic" - """Logs that are adjusted to support high volume low value verbose logs.""" + """Medium-touch logs needed for troubleshooting and incident response.""" ANALYTICS = "Analytics" - """Logs that allow monitoring and analytics.""" + """High-value logs used for continuous monitoring, real-time detection, and performance analytics.""" + AUXILIARY = "Auxiliary" + """Low-touch logs, such as verbose logs, and data required for auditing and compliance.""" class TableSubTypeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -310,6 +367,13 @@ class TableTypeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Data collected by a search 
job.""" +class TimeSelectorEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The time cursor used in Summary rules bins processing, e.g. TimeGenerated.""" + + TIME_GENERATED = "TimeGenerated" + """TimeGenerated.""" + + class Type(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of the destination resource.""" @@ -329,6 +393,30 @@ class WorkspaceEntityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): UPDATING = "Updating" +class WorkspaceFailoverState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The failover state of the replication.""" + + INACTIVE = "Inactive" + ACTIVATING = "Activating" + ACTIVE = "Active" + DEACTIVATING = "Deactivating" + FAILED = "Failed" + + +class WorkspaceReplicationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The provisioning state of the replication.""" + + SUCCEEDED = "Succeeded" + ENABLE_REQUESTED = "EnableRequested" + ENABLING = "Enabling" + DISABLE_REQUESTED = "DisableRequested" + DISABLING = "Disabling" + ROLLBACK_REQUESTED = "RollbackRequested" + ROLLING_BACK = "RollingBack" + FAILED = "Failed" + CANCELED = "Canceled" + + class WorkspaceSkuNameEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The name of the SKU.""" diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py index eaba73b1ab6d..66c732591b15 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/_models_py3.py @@ -1,5 +1,5 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 -# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
@@ -7,21 +7,136 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping import datetime -import sys -from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union +from typing import Any, Optional, TYPE_CHECKING, Union -from .. import _serialization - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +from .._utils import serialization as _serialization if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from .. import models as _models -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +JSON = MutableMapping[str, Any] + + +class AccessRule(_serialization.Model): + """Access rule in a network security perimeter configuration profile. + + :ivar name: Name of the access rule. + :vartype name: str + :ivar properties: Properties of Access Rule. + :vartype properties: ~azure.mgmt.loganalytics.models.AccessRuleProperties + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "AccessRuleProperties"}, + } + + def __init__( + self, *, name: Optional[str] = None, properties: Optional["_models.AccessRuleProperties"] = None, **kwargs: Any + ) -> None: + """ + :keyword name: Name of the access rule. + :paramtype name: str + :keyword properties: Properties of Access Rule. + :paramtype properties: ~azure.mgmt.loganalytics.models.AccessRuleProperties + """ + super().__init__(**kwargs) + self.name = name + self.properties = properties + + +class AccessRuleProperties(_serialization.Model): + """Properties of Access Rule. + + :ivar direction: Direction of Access Rule. Known values are: "Inbound" and "Outbound". 
+ :vartype direction: str or ~azure.mgmt.loganalytics.models.AccessRuleDirection + :ivar address_prefixes: Address prefixes in the CIDR format for inbound rules. + :vartype address_prefixes: list[str] + :ivar subscriptions: Subscriptions for inbound rules. + :vartype subscriptions: + list[~azure.mgmt.loganalytics.models.AccessRulePropertiesSubscriptionsItem] + :ivar network_security_perimeters: Network security perimeters for inbound rules. + :vartype network_security_perimeters: + list[~azure.mgmt.loganalytics.models.NetworkSecurityPerimeter] + :ivar fully_qualified_domain_names: Fully qualified domain names (FQDN) for outbound rules. + :vartype fully_qualified_domain_names: list[str] + :ivar email_addresses: Email addresses for outbound rules. + :vartype email_addresses: list[str] + :ivar phone_numbers: Phone numbers for outbound rules. + :vartype phone_numbers: list[str] + """ + + _attribute_map = { + "direction": {"key": "direction", "type": "str"}, + "address_prefixes": {"key": "addressPrefixes", "type": "[str]"}, + "subscriptions": {"key": "subscriptions", "type": "[AccessRulePropertiesSubscriptionsItem]"}, + "network_security_perimeters": {"key": "networkSecurityPerimeters", "type": "[NetworkSecurityPerimeter]"}, + "fully_qualified_domain_names": {"key": "fullyQualifiedDomainNames", "type": "[str]"}, + "email_addresses": {"key": "emailAddresses", "type": "[str]"}, + "phone_numbers": {"key": "phoneNumbers", "type": "[str]"}, + } + + def __init__( + self, + *, + direction: Optional[Union[str, "_models.AccessRuleDirection"]] = None, + address_prefixes: Optional[list[str]] = None, + subscriptions: Optional[list["_models.AccessRulePropertiesSubscriptionsItem"]] = None, + network_security_perimeters: Optional[list["_models.NetworkSecurityPerimeter"]] = None, + fully_qualified_domain_names: Optional[list[str]] = None, + email_addresses: Optional[list[str]] = None, + phone_numbers: Optional[list[str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword direction: 
Direction of Access Rule. Known values are: "Inbound" and "Outbound". + :paramtype direction: str or ~azure.mgmt.loganalytics.models.AccessRuleDirection + :keyword address_prefixes: Address prefixes in the CIDR format for inbound rules. + :paramtype address_prefixes: list[str] + :keyword subscriptions: Subscriptions for inbound rules. + :paramtype subscriptions: + list[~azure.mgmt.loganalytics.models.AccessRulePropertiesSubscriptionsItem] + :keyword network_security_perimeters: Network security perimeters for inbound rules. + :paramtype network_security_perimeters: + list[~azure.mgmt.loganalytics.models.NetworkSecurityPerimeter] + :keyword fully_qualified_domain_names: Fully qualified domain names (FQDN) for outbound rules. + :paramtype fully_qualified_domain_names: list[str] + :keyword email_addresses: Email addresses for outbound rules. + :paramtype email_addresses: list[str] + :keyword phone_numbers: Phone numbers for outbound rules. + :paramtype phone_numbers: list[str] + """ + super().__init__(**kwargs) + self.direction = direction + self.address_prefixes = address_prefixes + self.subscriptions = subscriptions + self.network_security_perimeters = network_security_perimeters + self.fully_qualified_domain_names = fully_qualified_domain_names + self.email_addresses = email_addresses + self.phone_numbers = phone_numbers + + +class AccessRulePropertiesSubscriptionsItem(_serialization.Model): + """Subscription identifiers. + + :ivar id: The fully qualified Azure resource ID of the subscription e.g. + ('/subscriptions/00000000-0000-0000-0000-000000000000'). + :vartype id: str + """ + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + } + + def __init__(self, *, id: Optional[str] = None, **kwargs: Any) -> None: # pylint: disable=redefined-builtin + """ + :keyword id: The fully qualified Azure resource ID of the subscription e.g. + ('/subscriptions/00000000-0000-0000-0000-000000000000'). 
+ :paramtype id: str + """ + super().__init__(**kwargs) + self.id = id class AssociatedWorkspace(_serialization.Model): @@ -34,10 +149,10 @@ class AssociatedWorkspace(_serialization.Model): :ivar workspace_name: Associated workspace resource name. :vartype workspace_name: str :ivar resource_id: Associated workspace arm resource id, in the form of: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}. # pylint: disable=line-too-long + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}. :vartype resource_id: str :ivar associate_date: The time of workspace association. - :vartype associate_date: str + :vartype associate_date: ~datetime.datetime """ _validation = { @@ -51,16 +166,16 @@ class AssociatedWorkspace(_serialization.Model): "workspace_id": {"key": "workspaceId", "type": "str"}, "workspace_name": {"key": "workspaceName", "type": "str"}, "resource_id": {"key": "resourceId", "type": "str"}, - "associate_date": {"key": "associateDate", "type": "str"}, + "associate_date": {"key": "associateDate", "type": "iso-8601"}, } def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.workspace_id = None - self.workspace_name = None - self.resource_id = None - self.associate_date = None + self.workspace_id: Optional[str] = None + self.workspace_name: Optional[str] = None + self.resource_id: Optional[str] = None + self.associate_date: Optional[datetime.datetime] = None class AvailableServiceTier(_serialization.Model): @@ -110,13 +225,13 @@ class AvailableServiceTier(_serialization.Model): def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.service_tier = None - self.enabled = None - self.minimum_retention = None - self.maximum_retention = None - self.default_retention = None - self.capacity_reservation_level = None - self.last_sku_update = None + 
self.service_tier: Optional[Union[str, "_models.SkuNameEnum"]] = None + self.enabled: Optional[bool] = None + self.minimum_retention: Optional[int] = None + self.maximum_retention: Optional[int] = None + self.default_retention: Optional[int] = None + self.capacity_reservation_level: Optional[int] = None + self.last_sku_update: Optional[str] = None class Resource(_serialization.Model): @@ -125,7 +240,7 @@ class Resource(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -149,9 +264,9 @@ class Resource(_serialization.Model): def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.id = None - self.name = None - self.type = None + self.id: Optional[str] = None + self.name: Optional[str] = None + self.type: Optional[str] = None class AzureEntityResource(Resource): @@ -160,7 +275,7 @@ class AzureEntityResource(Resource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. 
:vartype name: str @@ -188,45 +303,7 @@ class AzureEntityResource(Resource): def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.etag = None - - -class AzureResourceProperties(_serialization.Model): - """An Azure resource QueryPack-Query object. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Azure resource Id. - :vartype id: str - :ivar name: Azure resource name. - :vartype name: str - :ivar type: Azure resource type. - :vartype type: str - :ivar system_data: Read only system data. - :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.system_data = None + self.etag: Optional[str] = None class CapacityReservationProperties(_serialization.Model): @@ -235,7 +312,7 @@ class CapacityReservationProperties(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar last_sku_update: The last time Sku was updated. - :vartype last_sku_update: str + :vartype last_sku_update: ~datetime.datetime :ivar min_capacity: Minimum CapacityReservation value in Gigabytes. 
:vartype min_capacity: int """ @@ -246,15 +323,15 @@ class CapacityReservationProperties(_serialization.Model): } _attribute_map = { - "last_sku_update": {"key": "lastSkuUpdate", "type": "str"}, + "last_sku_update": {"key": "lastSkuUpdate", "type": "iso-8601"}, "min_capacity": {"key": "minCapacity", "type": "int"}, } def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.last_sku_update = None - self.min_capacity = None + self.last_sku_update: Optional[datetime.datetime] = None + self.min_capacity: Optional[int] = None class TrackedResource(Resource): @@ -266,7 +343,7 @@ class TrackedResource(Resource): All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -294,7 +371,7 @@ class TrackedResource(Resource): "location": {"key": "location", "type": "str"}, } - def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: + def __init__(self, *, location: str, tags: Optional[dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] @@ -306,7 +383,7 @@ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kw self.location = location -class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes +class Cluster(TrackedResource): """The top level Log Analytics cluster resource container. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -314,7 +391,7 @@ class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -347,14 +424,16 @@ class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes :ivar key_vault_properties: The associated key properties. :vartype key_vault_properties: ~azure.mgmt.loganalytics.models.KeyVaultProperties :ivar last_modified_date: The last time the cluster was updated. - :vartype last_modified_date: str + :vartype last_modified_date: ~datetime.datetime :ivar created_date: The cluster creation time. - :vartype created_date: str + :vartype created_date: ~datetime.datetime :ivar associated_workspaces: The list of Log Analytics workspaces associated with the cluster. :vartype associated_workspaces: list[~azure.mgmt.loganalytics.models.AssociatedWorkspace] :ivar capacity_reservation_properties: Additional properties for capacity reservation. :vartype capacity_reservation_properties: ~azure.mgmt.loganalytics.models.CapacityReservationProperties + :ivar replication: Cluster's replication properties. 
+ :vartype replication: ~azure.mgmt.loganalytics.models.ClusterReplicationProperties """ _validation = { @@ -382,28 +461,30 @@ class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes "is_availability_zones_enabled": {"key": "properties.isAvailabilityZonesEnabled", "type": "bool"}, "billing_type": {"key": "properties.billingType", "type": "str"}, "key_vault_properties": {"key": "properties.keyVaultProperties", "type": "KeyVaultProperties"}, - "last_modified_date": {"key": "properties.lastModifiedDate", "type": "str"}, - "created_date": {"key": "properties.createdDate", "type": "str"}, + "last_modified_date": {"key": "properties.lastModifiedDate", "type": "iso-8601"}, + "created_date": {"key": "properties.createdDate", "type": "iso-8601"}, "associated_workspaces": {"key": "properties.associatedWorkspaces", "type": "[AssociatedWorkspace]"}, "capacity_reservation_properties": { "key": "properties.capacityReservationProperties", "type": "CapacityReservationProperties", }, + "replication": {"key": "properties.replication", "type": "ClusterReplicationProperties"}, } def __init__( self, *, location: str, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, identity: Optional["_models.ManagedServiceIdentity"] = None, sku: Optional["_models.ClusterSku"] = None, is_double_encryption_enabled: Optional[bool] = None, is_availability_zones_enabled: Optional[bool] = None, billing_type: Optional[Union[str, "_models.BillingType"]] = None, key_vault_properties: Optional["_models.KeyVaultProperties"] = None, - associated_workspaces: Optional[List["_models.AssociatedWorkspace"]] = None, + associated_workspaces: Optional[list["_models.AssociatedWorkspace"]] = None, capacity_reservation_properties: Optional["_models.CapacityReservationProperties"] = None, + replication: Optional["_models.ClusterReplicationProperties"] = None, **kwargs: Any ) -> None: """ @@ -434,20 +515,23 @@ def __init__( :keyword capacity_reservation_properties: 
Additional properties for capacity reservation. :paramtype capacity_reservation_properties: ~azure.mgmt.loganalytics.models.CapacityReservationProperties + :keyword replication: Cluster's replication properties. + :paramtype replication: ~azure.mgmt.loganalytics.models.ClusterReplicationProperties """ super().__init__(tags=tags, location=location, **kwargs) self.identity = identity self.sku = sku - self.cluster_id = None - self.provisioning_state = None + self.cluster_id: Optional[str] = None + self.provisioning_state: Optional[Union[str, "_models.ClusterEntityStatus"]] = None self.is_double_encryption_enabled = is_double_encryption_enabled self.is_availability_zones_enabled = is_availability_zones_enabled self.billing_type = billing_type self.key_vault_properties = key_vault_properties - self.last_modified_date = None - self.created_date = None + self.last_modified_date: Optional[datetime.datetime] = None + self.created_date: Optional[datetime.datetime] = None self.associated_workspaces = associated_workspaces self.capacity_reservation_properties = capacity_reservation_properties + self.replication = replication class ClusterListResult(_serialization.Model): @@ -465,7 +549,7 @@ class ClusterListResult(_serialization.Model): } def __init__( - self, *, next_link: Optional[str] = None, value: Optional[List["_models.Cluster"]] = None, **kwargs: Any + self, *, next_link: Optional[str] = None, value: Optional[list["_models.Cluster"]] = None, **kwargs: Any ) -> None: """ :keyword next_link: The link used to get the next page of recommendations. 
@@ -506,7 +590,7 @@ def __init__( *, identity: Optional["_models.ManagedServiceIdentity"] = None, sku: Optional["_models.ClusterSku"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, key_vault_properties: Optional["_models.KeyVaultProperties"] = None, billing_type: Optional[Union[str, "_models.BillingType"]] = None, **kwargs: Any @@ -532,12 +616,78 @@ def __init__( self.billing_type = billing_type +class ClusterReplicationProperties(_serialization.Model): + """Cluster replication properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar location: The secondary location of the replication. If replication is being enabled, + enabled must be provided. + :vartype location: str + :ivar enabled: Specifies whether the replication is enabled or not. When true the cluster is + replicate to the specified location. + :vartype enabled: bool + :ivar is_availability_zones_enabled: Should enable AvailabilityZones for the given replicated + cluster. + :vartype is_availability_zones_enabled: bool + :ivar provisioning_state: The provisioning state of the cluster replication. Known values are: + "Succeeded", "EnableRequested", "Enabling", "DisableRequested", "Disabling", + "RollbackRequested", "RollingBack", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.loganalytics.models.ClusterReplicationState + :ivar created_date: The cluster's replication creation time. + :vartype created_date: ~datetime.datetime + :ivar last_modified_date: The last time the cluster's replication was updated. 
+ :vartype last_modified_date: ~datetime.datetime + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "created_date": {"readonly": True}, + "last_modified_date": {"readonly": True}, + } + + _attribute_map = { + "location": {"key": "location", "type": "str"}, + "enabled": {"key": "enabled", "type": "bool"}, + "is_availability_zones_enabled": {"key": "isAvailabilityZonesEnabled", "type": "bool"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "created_date": {"key": "createdDate", "type": "iso-8601"}, + "last_modified_date": {"key": "lastModifiedDate", "type": "iso-8601"}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + enabled: Optional[bool] = None, + is_availability_zones_enabled: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword location: The secondary location of the replication. If replication is being enabled, + enabled must be provided. + :paramtype location: str + :keyword enabled: Specifies whether the replication is enabled or not. When true the cluster is + replicate to the specified location. + :paramtype enabled: bool + :keyword is_availability_zones_enabled: Should enable AvailabilityZones for the given + replicated cluster. + :paramtype is_availability_zones_enabled: bool + """ + super().__init__(**kwargs) + self.location = location + self.enabled = enabled + self.is_availability_zones_enabled = is_availability_zones_enabled + self.provisioning_state: Optional[Union[str, "_models.ClusterReplicationState"]] = None + self.created_date: Optional[datetime.datetime] = None + self.last_modified_date: Optional[datetime.datetime] = None + + class ClusterSku(_serialization.Model): """The cluster sku definition. - :ivar capacity: The capacity reservation level in Gigabytes for this cluster. Known values are: - 100, 200, 300, 400, 500, 1000, 2000, 5000, 10000, 25000, and 50000. 
- :vartype capacity: int or ~azure.mgmt.loganalytics.models.Capacity + :ivar capacity: The capacity reservation level in Gigabytes for this cluster. + :vartype capacity: int :ivar name: The SKU (tier) of a cluster. "CapacityReservation" :vartype name: str or ~azure.mgmt.loganalytics.models.ClusterSkuNameEnum """ @@ -550,14 +700,13 @@ class ClusterSku(_serialization.Model): def __init__( self, *, - capacity: Optional[Union[int, "_models.Capacity"]] = None, + capacity: Optional[int] = None, name: Optional[Union[str, "_models.ClusterSkuNameEnum"]] = None, **kwargs: Any ) -> None: """ - :keyword capacity: The capacity reservation level in Gigabytes for this cluster. Known values - are: 100, 200, 300, 400, 500, 1000, 2000, 5000, 10000, 25000, and 50000. - :paramtype capacity: int or ~azure.mgmt.loganalytics.models.Capacity + :keyword capacity: The capacity reservation level in Gigabytes for this cluster. + :paramtype capacity: int :keyword name: The SKU (tier) of a cluster. "CapacityReservation" :paramtype name: str or ~azure.mgmt.loganalytics.models.ClusterSkuNameEnum """ @@ -634,8 +783,8 @@ def __init__( self.data_type_hint = data_type_hint self.display_name = display_name self.description = description - self.is_default_display = None - self.is_hidden = None + self.is_default_display: Optional[bool] = None + self.is_hidden: Optional[bool] = None class CoreSummary(_serialization.Model): @@ -677,7 +826,7 @@ class ProxyResource(Resource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. 
:vartype name: str @@ -687,13 +836,13 @@ class ProxyResource(Resource): """ -class DataExport(ProxyResource): # pylint: disable=too-many-instance-attributes +class DataExport(ProxyResource): """The top level data export resource container. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -746,7 +895,7 @@ def __init__( self, *, data_export_id: Optional[str] = None, - table_names: Optional[List[str]] = None, + table_names: Optional[list[str]] = None, enable: Optional[bool] = None, created_date: Optional[str] = None, last_modified_date: Optional[str] = None, @@ -779,7 +928,7 @@ def __init__( self.created_date = created_date self.last_modified_date = last_modified_date self.resource_id = resource_id - self.type_properties_destination_type = None + self.type_properties_destination_type: Optional[Union[str, "_models.Type"]] = None self.event_hub_name = event_hub_name @@ -794,7 +943,7 @@ class DataExportListResult(_serialization.Model): "value": {"key": "value", "type": "[DataExport]"}, } - def __init__(self, *, value: Optional[List["_models.DataExport"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.DataExport"]] = None, **kwargs: Any) -> None: """ :keyword value: List of data export instances within a workspace.. :paramtype value: list[~azure.mgmt.loganalytics.models.DataExport] @@ -811,7 +960,7 @@ class DataSource(ProxyResource): All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. 
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -863,7 +1012,7 @@ def __init__( properties: JSON, kind: Union[str, "_models.DataSourceKind"], etag: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, **kwargs: Any ) -> None: """ @@ -949,7 +1098,7 @@ class DataSourceListResult(_serialization.Model): } def __init__( - self, *, value: Optional[List["_models.DataSource"]] = None, next_link: Optional[str] = None, **kwargs: Any + self, *, value: Optional[list["_models.DataSource"]] = None, next_link: Optional[str] = None, **kwargs: Any ) -> None: """ :keyword value: A list of datasources. @@ -986,8 +1135,8 @@ class ErrorAdditionalInfo(_serialization.Model): def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.type = None - self.info = None + self.type: Optional[str] = None + self.info: Optional[JSON] = None class ErrorDetail(_serialization.Model): @@ -1026,11 +1175,97 @@ class ErrorDetail(_serialization.Model): def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.code = None - self.message = None - self.target = None - self.details = None - self.additional_info = None + self.code: Optional[str] = None + self.message: Optional[str] = None + self.target: Optional[str] = None + self.details: Optional[list["_models.ErrorDetail"]] = None + self.additional_info: Optional[list["_models.ErrorAdditionalInfo"]] = None + + +class ErrorDetailAutoGenerated(_serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. 
+ :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.loganalytics.models.ErrorDetailAutoGenerated] + :ivar additional_info: The error additional info. + :vartype additional_info: list[~azure.mgmt.loganalytics.models.ErrorAdditionalInfo] + """ + + _validation = { + "code": {"readonly": True}, + "message": {"readonly": True}, + "target": {"readonly": True}, + "details": {"readonly": True}, + "additional_info": {"readonly": True}, + } + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetailAutoGenerated]"}, + "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.code: Optional[str] = None + self.message: Optional[str] = None + self.target: Optional[str] = None + self.details: Optional[list["_models.ErrorDetailAutoGenerated"]] = None + self.additional_info: Optional[list["_models.ErrorAdditionalInfo"]] = None + + +class ErrorDetailAutoGenerated2(_serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.loganalytics.models.ErrorDetailAutoGenerated2] + :ivar additional_info: The error additional info. 
+ :vartype additional_info: list[~azure.mgmt.loganalytics.models.ErrorAdditionalInfo] + """ + + _validation = { + "code": {"readonly": True}, + "message": {"readonly": True}, + "target": {"readonly": True}, + "details": {"readonly": True}, + "additional_info": {"readonly": True}, + } + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetailAutoGenerated2]"}, + "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.code: Optional[str] = None + self.message: Optional[str] = None + self.target: Optional[str] = None + self.details: Optional[list["_models.ErrorDetailAutoGenerated2"]] = None + self.additional_info: Optional[list["_models.ErrorAdditionalInfo"]] = None class ErrorResponse(_serialization.Model): @@ -1054,6 +1289,48 @@ def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: A self.error = error +class ErrorResponseAutoGenerated(_serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed + operations. (This also follows the OData error response format.). + + :ivar error: The error object. + :vartype error: ~azure.mgmt.loganalytics.models.ErrorDetailAutoGenerated + """ + + _attribute_map = { + "error": {"key": "error", "type": "ErrorDetailAutoGenerated"}, + } + + def __init__(self, *, error: Optional["_models.ErrorDetailAutoGenerated"] = None, **kwargs: Any) -> None: + """ + :keyword error: The error object. 
+ :paramtype error: ~azure.mgmt.loganalytics.models.ErrorDetailAutoGenerated + """ + super().__init__(**kwargs) + self.error = error + + +class ErrorResponseAutoGenerated2(_serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed + operations. (This also follows the OData error response format.). + + :ivar error: The error object. + :vartype error: ~azure.mgmt.loganalytics.models.ErrorDetailAutoGenerated2 + """ + + _attribute_map = { + "error": {"key": "error", "type": "ErrorDetailAutoGenerated2"}, + } + + def __init__(self, *, error: Optional["_models.ErrorDetailAutoGenerated2"] = None, **kwargs: Any) -> None: + """ + :keyword error: The error object. + :paramtype error: ~azure.mgmt.loganalytics.models.ErrorDetailAutoGenerated2 + """ + super().__init__(**kwargs) + self.error = error + + class Identity(_serialization.Model): """Identity for the resource. @@ -1065,12 +1342,12 @@ class Identity(_serialization.Model): :vartype principal_id: str :ivar tenant_id: The tenant ID of resource. :vartype tenant_id: str - :ivar type: Type of managed service identity. Required. Known values are: "user", - "application", "managedIdentity", "key", "SystemAssigned", "UserAssigned", and "None". + :ivar type: Type of managed service identity. Required. Known values are: "SystemAssigned", + "UserAssigned", and "None". :vartype type: str or ~azure.mgmt.loganalytics.models.IdentityType :ivar user_assigned_identities: The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. # pylint: disable=line-too-long + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. 
:vartype user_assigned_identities: dict[str, ~azure.mgmt.loganalytics.models.UserIdentityProperties] """ @@ -1092,22 +1369,22 @@ def __init__( self, *, type: Union[str, "_models.IdentityType"], - user_assigned_identities: Optional[Dict[str, "_models.UserIdentityProperties"]] = None, + user_assigned_identities: Optional[dict[str, "_models.UserIdentityProperties"]] = None, **kwargs: Any ) -> None: """ - :keyword type: Type of managed service identity. Required. Known values are: "user", - "application", "managedIdentity", "key", "SystemAssigned", "UserAssigned", and "None". + :keyword type: Type of managed service identity. Required. Known values are: "SystemAssigned", + "UserAssigned", and "None". :paramtype type: str or ~azure.mgmt.loganalytics.models.IdentityType :keyword user_assigned_identities: The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. # pylint: disable=line-too-long + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. :paramtype user_assigned_identities: dict[str, ~azure.mgmt.loganalytics.models.UserIdentityProperties] """ super().__init__(**kwargs) - self.principal_id = None - self.tenant_id = None + self.principal_id: Optional[str] = None + self.tenant_id: Optional[str] = None self.type = type self.user_assigned_identities = user_assigned_identities @@ -1205,7 +1482,7 @@ class LinkedService(ProxyResource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. 
# pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -1244,7 +1521,7 @@ class LinkedService(ProxyResource): def __init__( self, *, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, resource_id: Optional[str] = None, write_access_resource_id: Optional[str] = None, provisioning_state: Optional[Union[str, "_models.LinkedServiceEntityStatus"]] = None, @@ -1281,7 +1558,7 @@ class LinkedServiceListResult(_serialization.Model): "value": {"key": "value", "type": "[LinkedService]"}, } - def __init__(self, *, value: Optional[List["_models.LinkedService"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.LinkedService"]] = None, **kwargs: Any) -> None: """ :keyword value: The list of linked service instances. :paramtype value: list[~azure.mgmt.loganalytics.models.LinkedService] @@ -1301,7 +1578,7 @@ class LinkedStorageAccountsListResult(_serialization.Model): "value": {"key": "value", "type": "[LinkedStorageAccountsResource]"}, } - def __init__(self, *, value: Optional[List["_models.LinkedStorageAccountsResource"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.LinkedStorageAccountsResource"]] = None, **kwargs: Any) -> None: """ :keyword value: A list of linked storage accounts instances. :paramtype value: list[~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource] @@ -1316,7 +1593,7 @@ class LinkedStorageAccountsResource(ProxyResource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. 
# pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -1345,39 +1622,87 @@ class LinkedStorageAccountsResource(ProxyResource): "storage_account_ids": {"key": "properties.storageAccountIds", "type": "[str]"}, } - def __init__(self, *, storage_account_ids: Optional[List[str]] = None, **kwargs: Any) -> None: + def __init__(self, *, storage_account_ids: Optional[list[str]] = None, **kwargs: Any) -> None: """ :keyword storage_account_ids: Linked storage accounts resources ids. :paramtype storage_account_ids: list[str] """ super().__init__(**kwargs) - self.data_source_type = None + self.data_source_type: Optional[Union[str, "_models.DataSourceType"]] = None self.storage_account_ids = storage_account_ids -class QueryPacksResource(_serialization.Model): - """An azure resource object. +class ResourceAutoGenerated(_serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. 
+ :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.id: Optional[str] = None + self.name: Optional[str] = None + self.type: Optional[str] = None + self.system_data: Optional["_models.SystemData"] = None + + +class TrackedResourceAutoGenerated(ResourceAutoGenerated): + """The resource model definition for an Azure Resource Manager tracked top level resource which + has 'tags' and a 'location'. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to server. - :ivar id: Azure resource Id. + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". :vartype id: str - :ivar name: Azure resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: Azure resource type. + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :ivar location: Resource location. Required. - :vartype location: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData :ivar tags: Resource tags. :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. 
+ :vartype location: str """ _validation = { "id": {"readonly": True}, "name": {"readonly": True}, "type": {"readonly": True}, + "system_data": {"readonly": True}, "location": {"required": True}, } @@ -1385,42 +1710,45 @@ class QueryPacksResource(_serialization.Model): "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "location": {"key": "location", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, } - def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: + def __init__(self, *, location: str, tags: Optional[dict[str, str]] = None, **kwargs: Any) -> None: """ - :keyword location: Resource location. Required. - :paramtype location: str :keyword tags: Resource tags. :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str """ super().__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = location self.tags = tags + self.location = location -class LogAnalyticsQueryPack(QueryPacksResource): +class LogAnalyticsQueryPack(TrackedResourceAutoGenerated): """An Log Analytics QueryPack definition. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to server. - :ivar id: Azure resource Id. + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". :vartype id: str - :ivar name: Azure resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: Azure resource type. + :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :ivar location: Resource location. Required. - :vartype location: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData :ivar tags: Resource tags. :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str :ivar query_pack_id: The unique ID of your application. This field cannot be changed. :vartype query_pack_id: str :ivar time_created: Creation Date for the Log Analytics QueryPack, in ISO 8601 format. @@ -1437,6 +1765,7 @@ class LogAnalyticsQueryPack(QueryPacksResource): "id": {"readonly": True}, "name": {"readonly": True}, "type": {"readonly": True}, + "system_data": {"readonly": True}, "location": {"required": True}, "query_pack_id": {"readonly": True}, "time_created": {"readonly": True}, @@ -1448,26 +1777,27 @@ class LogAnalyticsQueryPack(QueryPacksResource): "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "location": {"key": "location", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, "query_pack_id": {"key": "properties.queryPackId", "type": "str"}, "time_created": {"key": "properties.timeCreated", "type": "iso-8601"}, "time_modified": {"key": "properties.timeModified", "type": "iso-8601"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } - def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: + def __init__(self, *, location: str, tags: Optional[dict[str, str]] = None, **kwargs: Any) -> None: """ - :keyword location: Resource location. Required. - :paramtype location: str :keyword tags: Resource tags. 
:paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str """ - super().__init__(location=location, tags=tags, **kwargs) - self.query_pack_id = None - self.time_created = None - self.time_modified = None - self.provisioning_state = None + super().__init__(tags=tags, location=location, **kwargs) + self.query_pack_id: Optional[str] = None + self.time_created: Optional[datetime.datetime] = None + self.time_modified: Optional[datetime.datetime] = None + self.provisioning_state: Optional[str] = None class LogAnalyticsQueryPackListResult(_serialization.Model): @@ -1492,7 +1822,7 @@ class LogAnalyticsQueryPackListResult(_serialization.Model): } def __init__( - self, *, value: List["_models.LogAnalyticsQueryPack"], next_link: Optional[str] = None, **kwargs: Any + self, *, value: list["_models.LogAnalyticsQueryPack"], next_link: Optional[str] = None, **kwargs: Any ) -> None: """ :keyword value: List of Log Analytics QueryPack definitions. Required. @@ -1506,18 +1836,41 @@ def __init__( self.next_link = next_link -class LogAnalyticsQueryPackQuery(AzureResourceProperties): # pylint: disable=too-many-instance-attributes +class ProxyResourceAutoGenerated(ResourceAutoGenerated): + """The resource model definition for a Azure Resource Manager proxy resource. It will not have + tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
+ :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData + """ + + +class LogAnalyticsQueryPackQuery(ProxyResourceAutoGenerated): """A Log Analytics QueryPack-Query definition. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Azure resource Id. + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". :vartype id: str - :ivar name: Azure resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: Azure resource type. + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :ivar system_data: Read only system data. + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData :ivar id_properties_id: The unique ID of your application. This field cannot be changed. 
:vartype id_properties_id: str @@ -1576,7 +1929,7 @@ def __init__( description: Optional[str] = None, body: Optional[str] = None, related: Optional["_models.LogAnalyticsQueryPackQueryPropertiesRelated"] = None, - tags: Optional[Dict[str, List[str]]] = None, + tags: Optional[dict[str, list[str]]] = None, properties: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -1595,11 +1948,11 @@ def __init__( :paramtype properties: JSON """ super().__init__(**kwargs) - self.id_properties_id = None + self.id_properties_id: Optional[str] = None self.display_name = display_name - self.time_created = None - self.time_modified = None - self.author = None + self.time_created: Optional[datetime.datetime] = None + self.time_modified: Optional[datetime.datetime] = None + self.author: Optional[str] = None self.description = description self.body = body self.related = related @@ -1629,7 +1982,7 @@ class LogAnalyticsQueryPackQueryListResult(_serialization.Model): } def __init__( - self, *, value: List["_models.LogAnalyticsQueryPackQuery"], next_link: Optional[str] = None, **kwargs: Any + self, *, value: list["_models.LogAnalyticsQueryPackQuery"], next_link: Optional[str] = None, **kwargs: Any ) -> None: """ :keyword value: List of Log Analytics QueryPack Query definitions. Required. 
@@ -1663,9 +2016,9 @@ class LogAnalyticsQueryPackQueryPropertiesRelated(_serialization.Model): # pyli def __init__( self, *, - categories: Optional[List[str]] = None, - resource_types: Optional[List[str]] = None, - solutions: Optional[List[str]] = None, + categories: Optional[list[str]] = None, + resource_types: Optional[list[str]] = None, + solutions: Optional[list[str]] = None, **kwargs: Any ) -> None: """ @@ -1701,7 +2054,7 @@ def __init__( self, *, related: Optional["_models.LogAnalyticsQueryPackQuerySearchPropertiesRelated"] = None, - tags: Optional[Dict[str, List[str]]] = None, + tags: Optional[dict[str, list[str]]] = None, **kwargs: Any ) -> None: """ @@ -1736,9 +2089,9 @@ class LogAnalyticsQueryPackQuerySearchPropertiesRelated(_serialization.Model): def __init__( self, *, - categories: Optional[List[str]] = None, - resource_types: Optional[List[str]] = None, - solutions: Optional[List[str]] = None, + categories: Optional[list[str]] = None, + resource_types: Optional[list[str]] = None, + solutions: Optional[list[str]] = None, **kwargs: Any ) -> None: """ @@ -1774,7 +2127,7 @@ class ManagedServiceIdentity(_serialization.Model): :vartype type: str or ~azure.mgmt.loganalytics.models.ManagedServiceIdentityType :ivar user_assigned_identities: The set of user assigned identities associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. The dictionary values can be empty objects ({}) in requests. 
:vartype user_assigned_identities: dict[str, ~azure.mgmt.loganalytics.models.UserAssignedIdentity] @@ -1797,7 +2150,7 @@ def __init__( self, *, type: Union[str, "_models.ManagedServiceIdentityType"], - user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + user_assigned_identities: Optional[dict[str, "_models.UserAssignedIdentity"]] = None, **kwargs: Any ) -> None: """ @@ -1807,14 +2160,14 @@ def __init__( :paramtype type: str or ~azure.mgmt.loganalytics.models.ManagedServiceIdentityType :keyword user_assigned_identities: The set of user assigned identities associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. The dictionary values can be empty objects ({}) in requests. :paramtype user_assigned_identities: dict[str, ~azure.mgmt.loganalytics.models.UserAssignedIdentity] """ super().__init__(**kwargs) - self.principal_id = None - self.tenant_id = None + self.principal_id: Optional[str] = None + self.tenant_id: Optional[str] = None self.type = type self.user_assigned_identities = user_assigned_identities @@ -1919,6 +2272,237 @@ def __init__(self, *, value: Optional[str] = None, localized_value: Optional[str self.localized_value = localized_value +class NetworkSecurityPerimeter(_serialization.Model): + """Information about a network security perimeter (NSP). + + :ivar id: Fully qualified Azure resource ID of the NSP resource. + :vartype id: str + :ivar perimeter_guid: Universal unique ID (UUID) of the network security perimeter. + :vartype perimeter_guid: str + :ivar location: Location of the network security perimeter. 
+ :vartype location: str + """ + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "perimeter_guid": {"key": "perimeterGuid", "type": "str"}, + "location": {"key": "location", "type": "str"}, + } + + def __init__( + self, + *, + id: Optional[str] = None, # pylint: disable=redefined-builtin + perimeter_guid: Optional[str] = None, + location: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword id: Fully qualified Azure resource ID of the NSP resource. + :paramtype id: str + :keyword perimeter_guid: Universal unique ID (UUID) of the network security perimeter. + :paramtype perimeter_guid: str + :keyword location: Location of the network security perimeter. + :paramtype location: str + """ + super().__init__(**kwargs) + self.id = id + self.perimeter_guid = perimeter_guid + self.location = location + + +class NetworkSecurityPerimeterConfiguration(ProxyResourceAutoGenerated): + """Network security perimeter (NSP) configuration resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData + :ivar properties: Network security configuration properties. 
+ :vartype properties: + ~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfigurationProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "NetworkSecurityPerimeterConfigurationProperties"}, + } + + def __init__( + self, *, properties: Optional["_models.NetworkSecurityPerimeterConfigurationProperties"] = None, **kwargs: Any + ) -> None: + """ + :keyword properties: Network security configuration properties. + :paramtype properties: + ~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfigurationProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class NetworkSecurityPerimeterConfigurationListResult(_serialization.Model): # pylint: disable=name-too-long + """Result of a list NSP (network security perimeter) configurations request. + + :ivar value: Array of network security perimeter results. + :vartype value: list[~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfiguration] + :ivar next_link: The link used to get the next page of results. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[NetworkSecurityPerimeterConfiguration]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, + *, + value: Optional[list["_models.NetworkSecurityPerimeterConfiguration"]] = None, + next_link: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword value: Array of network security perimeter results. + :paramtype value: list[~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfiguration] + :keyword next_link: The link used to get the next page of results. 
+ :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class NetworkSecurityPerimeterConfigurationProperties(_serialization.Model): # pylint: disable=name-too-long + """Network security configuration properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: Provisioning state of a network security perimeter configuration that + is being created or updated. Known values are: "Succeeded", "Creating", "Updating", "Deleting", + "Accepted", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfigurationProvisioningState + :ivar provisioning_issues: List of provisioning issues, if any. + :vartype provisioning_issues: list[~azure.mgmt.loganalytics.models.ProvisioningIssue] + :ivar network_security_perimeter: Information about a network security perimeter (NSP). + :vartype network_security_perimeter: ~azure.mgmt.loganalytics.models.NetworkSecurityPerimeter + :ivar resource_association: Information about resource association. + :vartype resource_association: ~azure.mgmt.loganalytics.models.ResourceAssociation + :ivar profile: Network security perimeter configuration profile. 
+ :vartype profile: ~azure.mgmt.loganalytics.models.NetworkSecurityProfile + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "provisioning_issues": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "provisioning_issues": {"key": "provisioningIssues", "type": "[ProvisioningIssue]"}, + "network_security_perimeter": {"key": "networkSecurityPerimeter", "type": "NetworkSecurityPerimeter"}, + "resource_association": {"key": "resourceAssociation", "type": "ResourceAssociation"}, + "profile": {"key": "profile", "type": "NetworkSecurityProfile"}, + } + + def __init__( + self, + *, + network_security_perimeter: Optional["_models.NetworkSecurityPerimeter"] = None, + resource_association: Optional["_models.ResourceAssociation"] = None, + profile: Optional["_models.NetworkSecurityProfile"] = None, + **kwargs: Any + ) -> None: + """ + :keyword network_security_perimeter: Information about a network security perimeter (NSP). + :paramtype network_security_perimeter: ~azure.mgmt.loganalytics.models.NetworkSecurityPerimeter + :keyword resource_association: Information about resource association. + :paramtype resource_association: ~azure.mgmt.loganalytics.models.ResourceAssociation + :keyword profile: Network security perimeter configuration profile. + :paramtype profile: ~azure.mgmt.loganalytics.models.NetworkSecurityProfile + """ + super().__init__(**kwargs) + self.provisioning_state: Optional[ + Union[str, "_models.NetworkSecurityPerimeterConfigurationProvisioningState"] + ] = None + self.provisioning_issues: Optional[list["_models.ProvisioningIssue"]] = None + self.network_security_perimeter = network_security_perimeter + self.resource_association = resource_association + self.profile = profile + + +class NetworkSecurityProfile(_serialization.Model): + """Network security perimeter configuration profile. + + :ivar name: Name of the profile. 
+ :vartype name: str + :ivar access_rules_version: Current access rules version. + :vartype access_rules_version: int + :ivar access_rules: List of Access Rules. + :vartype access_rules: list[~azure.mgmt.loganalytics.models.AccessRule] + :ivar diagnostic_settings_version: Current diagnostic settings version. + :vartype diagnostic_settings_version: int + :ivar enabled_log_categories: List of log categories that are enabled. + :vartype enabled_log_categories: list[str] + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "access_rules_version": {"key": "accessRulesVersion", "type": "int"}, + "access_rules": {"key": "accessRules", "type": "[AccessRule]"}, + "diagnostic_settings_version": {"key": "diagnosticSettingsVersion", "type": "int"}, + "enabled_log_categories": {"key": "enabledLogCategories", "type": "[str]"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + access_rules_version: Optional[int] = None, + access_rules: Optional[list["_models.AccessRule"]] = None, + diagnostic_settings_version: Optional[int] = None, + enabled_log_categories: Optional[list[str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword name: Name of the profile. + :paramtype name: str + :keyword access_rules_version: Current access rules version. + :paramtype access_rules_version: int + :keyword access_rules: List of Access Rules. + :paramtype access_rules: list[~azure.mgmt.loganalytics.models.AccessRule] + :keyword diagnostic_settings_version: Current diagnostic settings version. + :paramtype diagnostic_settings_version: int + :keyword enabled_log_categories: List of log categories that are enabled. 
+ :paramtype enabled_log_categories: list[str] + """ + super().__init__(**kwargs) + self.name = name + self.access_rules_version = access_rules_version + self.access_rules = access_rules + self.diagnostic_settings_version = diagnostic_settings_version + self.enabled_log_categories = enabled_log_categories + + class Operation(_serialization.Model): """Supported operation of OperationalInsights resource provider. @@ -2014,7 +2598,7 @@ class OperationListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, *, value: Optional[List["_models.Operation"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.Operation"]] = None, **kwargs: Any) -> None: """ :keyword value: List of solution operations supported by the OperationsManagement resource provider. @@ -2022,7 +2606,7 @@ def __init__(self, *, value: Optional[List["_models.Operation"]] = None, **kwarg """ super().__init__(**kwargs) self.value = value - self.next_link = None + self.next_link: Optional[str] = None class OperationStatus(_serialization.Model): @@ -2085,30 +2669,145 @@ def __init__( self.error = error -class PrivateLinkScopedResource(_serialization.Model): - """The private link scope resource reference. +class PrivateLinkScopedResource(_serialization.Model): + """The private link scope resource reference. + + :ivar resource_id: The full resource Id of the private link scope resource. + :vartype resource_id: str + :ivar scope_id: The private link scope unique Identifier. + :vartype scope_id: str + """ + + _attribute_map = { + "resource_id": {"key": "resourceId", "type": "str"}, + "scope_id": {"key": "scopeId", "type": "str"}, + } + + def __init__(self, *, resource_id: Optional[str] = None, scope_id: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword resource_id: The full resource Id of the private link scope resource. + :paramtype resource_id: str + :keyword scope_id: The private link scope unique Identifier. 
+ :paramtype scope_id: str + """ + super().__init__(**kwargs) + self.resource_id = resource_id + self.scope_id = scope_id + + +class ProvisioningIssue(_serialization.Model): + """Describes a provisioning issue for a network security perimeter configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the issue. + :vartype name: str + :ivar properties: Details of a provisioning issue for a network security perimeter (NSP) + configuration. Resource providers should generate separate provisioning issue elements for each + separate issue detected, and include a meaningful and distinctive description, as well as any + appropriate suggestedResourceIds and suggestedAccessRules. + :vartype properties: ~azure.mgmt.loganalytics.models.ProvisioningIssueProperties + """ + + _validation = { + "name": {"readonly": True}, + "properties": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "ProvisioningIssueProperties"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.name: Optional[str] = None + self.properties: Optional["_models.ProvisioningIssueProperties"] = None + + +class ProvisioningIssueProperties(_serialization.Model): + """Details of a provisioning issue for a network security perimeter (NSP) configuration. Resource + providers should generate separate provisioning issue elements for each separate issue + detected, and include a meaningful and distinctive description, as well as any appropriate + suggestedResourceIds and suggestedAccessRules. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar issue_type: Type of issue. Known values are: "Unknown", + "ConfigurationPropagationFailure", "MissingPerimeterConfiguration", and + "MissingIdentityConfiguration". 
+ :vartype issue_type: str or ~azure.mgmt.loganalytics.models.IssueType + :ivar severity: Severity of the issue. Known values are: "Warning" and "Error". + :vartype severity: str or ~azure.mgmt.loganalytics.models.Severity + :ivar description: Description of the issue. + :vartype description: str + :ivar suggested_resource_ids: Fully qualified resource IDs of suggested resources that can be + associated to the network security perimeter (NSP) to remediate the issue. + :vartype suggested_resource_ids: list[str] + :ivar suggested_access_rules: Access rules that can be added to the network security profile + (NSP) to remediate the issue. + :vartype suggested_access_rules: list[~azure.mgmt.loganalytics.models.AccessRule] + """ + + _validation = { + "issue_type": {"readonly": True}, + "severity": {"readonly": True}, + "description": {"readonly": True}, + "suggested_resource_ids": {"readonly": True}, + "suggested_access_rules": {"readonly": True}, + } + + _attribute_map = { + "issue_type": {"key": "issueType", "type": "str"}, + "severity": {"key": "severity", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "suggested_resource_ids": {"key": "suggestedResourceIds", "type": "[str]"}, + "suggested_access_rules": {"key": "suggestedAccessRules", "type": "[AccessRule]"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.issue_type: Optional[Union[str, "_models.IssueType"]] = None + self.severity: Optional[Union[str, "_models.Severity"]] = None + self.description: Optional[str] = None + self.suggested_resource_ids: Optional[list[str]] = None + self.suggested_access_rules: Optional[list["_models.AccessRule"]] = None + + +class ResourceAssociation(_serialization.Model): + """Information about resource association. - :ivar resource_id: The full resource Id of the private link scope resource. - :vartype resource_id: str - :ivar scope_id: The private link scope unique Identifier. 
- :vartype scope_id: str + :ivar name: Name of the resource association. + :vartype name: str + :ivar access_mode: Access mode of the resource association. Known values are: "Enforced", + "Learning", and "Audit". + :vartype access_mode: str or ~azure.mgmt.loganalytics.models.ResourceAssociationAccessMode """ _attribute_map = { - "resource_id": {"key": "resourceId", "type": "str"}, - "scope_id": {"key": "scopeId", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "access_mode": {"key": "accessMode", "type": "str"}, } - def __init__(self, *, resource_id: Optional[str] = None, scope_id: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + name: Optional[str] = None, + access_mode: Optional[Union[str, "_models.ResourceAssociationAccessMode"]] = None, + **kwargs: Any + ) -> None: """ - :keyword resource_id: The full resource Id of the private link scope resource. - :paramtype resource_id: str - :keyword scope_id: The private link scope unique Identifier. - :paramtype scope_id: str + :keyword name: Name of the resource association. + :paramtype name: str + :keyword access_mode: Access mode of the resource association. Known values are: "Enforced", + "Learning", and "Audit". 
+ :paramtype access_mode: str or ~azure.mgmt.loganalytics.models.ResourceAssociationAccessMode """ super().__init__(**kwargs) - self.resource_id = resource_id - self.scope_id = scope_id + self.name = name + self.access_mode = access_mode class RestoredLogs(_serialization.Model): @@ -2157,7 +2856,7 @@ def __init__( self.start_restore_time = start_restore_time self.end_restore_time = end_restore_time self.source_table = source_table - self.azure_async_operation_id = None + self.azure_async_operation_id: Optional[str] = None class ResultStatistics(_serialization.Model): @@ -2188,12 +2887,79 @@ class ResultStatistics(_serialization.Model): def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.progress = None - self.ingested_records = None - self.scanned_gb = None + self.progress: Optional[float] = None + self.ingested_records: Optional[int] = None + self.scanned_gb: Optional[float] = None + + +class RuleDefinition(_serialization.Model): + """Rule definition parameters. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar query: Summary rule query. + :vartype query: str + :ivar bin_size: Scheduled window in minutes. Allowed values: 20, 30, 60, 120, 180, 360, 720, + 1440. + :vartype bin_size: int + :ivar bin_delay: The minimum delay in seconds before bin processing. + :vartype bin_delay: int + :ivar bin_start_time: The start time (UTC) when Summary rule execution starts. + :vartype bin_start_time: ~datetime.datetime + :ivar time_selector: The time cursor used in Summary rules bins processing, e.g. TimeGenerated. + "TimeGenerated" + :vartype time_selector: str or ~azure.mgmt.loganalytics.models.TimeSelectorEnum + :ivar destination_table: The destination table used for the Summary rule results. 
+ :vartype destination_table: str + """ + + _validation = { + "destination_table": {"readonly": True}, + } + + _attribute_map = { + "query": {"key": "query", "type": "str"}, + "bin_size": {"key": "binSize", "type": "int"}, + "bin_delay": {"key": "binDelay", "type": "int"}, + "bin_start_time": {"key": "binStartTime", "type": "iso-8601"}, + "time_selector": {"key": "timeSelector", "type": "str"}, + "destination_table": {"key": "destinationTable", "type": "str"}, + } + + def __init__( + self, + *, + query: Optional[str] = None, + bin_size: Optional[int] = None, + bin_delay: Optional[int] = None, + bin_start_time: Optional[datetime.datetime] = None, + time_selector: Optional[Union[str, "_models.TimeSelectorEnum"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword query: Summary rule query. + :paramtype query: str + :keyword bin_size: Scheduled window in minutes. Allowed values: 20, 30, 60, 120, 180, 360, 720, + 1440. + :paramtype bin_size: int + :keyword bin_delay: The minimum delay in seconds before bin processing. + :paramtype bin_delay: int + :keyword bin_start_time: The start time (UTC) when Summary rule execution starts. + :paramtype bin_start_time: ~datetime.datetime + :keyword time_selector: The time cursor used in Summary rules bins processing, e.g. + TimeGenerated. "TimeGenerated" + :paramtype time_selector: str or ~azure.mgmt.loganalytics.models.TimeSelectorEnum + """ + super().__init__(**kwargs) + self.query = query + self.bin_size = bin_size + self.bin_delay = bin_delay + self.bin_start_time = bin_start_time + self.time_selector = time_selector + self.destination_table: Optional[str] = None -class SavedSearch(ProxyResource): # pylint: disable=too-many-instance-attributes +class SavedSearch(ProxyResource): """Value object for saved search results. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -2201,7 +2967,7 @@ class SavedSearch(ProxyResource): # pylint: disable=too-many-instance-attribute All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -2265,7 +3031,7 @@ def __init__( function_alias: Optional[str] = None, function_parameters: Optional[str] = None, version: Optional[int] = None, - tags: Optional[List["_models.Tag"]] = None, + tags: Optional[list["_models.Tag"]] = None, **kwargs: Any ) -> None: """ @@ -2314,7 +3080,7 @@ class SavedSearchesListResult(_serialization.Model): "value": {"key": "value", "type": "[SavedSearch]"}, } - def __init__(self, *, value: Optional[List["_models.SavedSearch"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.SavedSearch"]] = None, **kwargs: Any) -> None: """ :keyword value: The array of result values. :paramtype value: list[~azure.mgmt.loganalytics.models.SavedSearch] @@ -2323,7 +3089,7 @@ def __init__(self, *, value: Optional[List["_models.SavedSearch"]] = None, **kwa self.value = value -class Schema(_serialization.Model): # pylint: disable=too-many-instance-attributes +class Schema(_serialization.Model): """Table's schema. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -2385,7 +3151,7 @@ def __init__( name: Optional[str] = None, display_name: Optional[str] = None, description: Optional[str] = None, - columns: Optional[List["_models.Column"]] = None, + columns: Optional[list["_models.Column"]] = None, **kwargs: Any ) -> None: """ @@ -2403,13 +3169,13 @@ def __init__( self.display_name = display_name self.description = description self.columns = columns - self.standard_columns = None - self.categories = None - self.labels = None - self.source = None - self.table_type = None - self.table_sub_type = None - self.solutions = None + self.standard_columns: Optional[list["_models.Column"]] = None + self.categories: Optional[list[str]] = None + self.labels: Optional[list[str]] = None + self.source: Optional[Union[str, "_models.SourceEnum"]] = None + self.table_type: Optional[Union[str, "_models.TableTypeEnum"]] = None + self.table_sub_type: Optional[Union[str, "_models.TableSubTypeEnum"]] = None + self.solutions: Optional[list[str]] = None class SearchGetSchemaResponse(_serialization.Model): @@ -2430,7 +3196,7 @@ def __init__( self, *, metadata: Optional["_models.SearchMetadata"] = None, - value: Optional[List["_models.SearchSchemaValue"]] = None, + value: Optional[list["_models.SearchSchemaValue"]] = None, **kwargs: Any ) -> None: """ @@ -2444,7 +3210,7 @@ def __init__( self.value = value -class SearchMetadata(_serialization.Model): # pylint: disable=too-many-instance-attributes +class SearchMetadata(_serialization.Model): """Metadata for search results. :ivar search_id: The request id of the search. 
@@ -2511,12 +3277,12 @@ def __init__( total: Optional[int] = None, top: Optional[int] = None, id: Optional[str] = None, # pylint: disable=redefined-builtin - core_summaries: Optional[List["_models.CoreSummary"]] = None, + core_summaries: Optional[list["_models.CoreSummary"]] = None, status: Optional[str] = None, start_time: Optional[datetime.datetime] = None, last_updated: Optional[datetime.datetime] = None, e_tag: Optional[str] = None, - sort: Optional[List["_models.SearchSort"]] = None, + sort: Optional[list["_models.SearchSort"]] = None, request_time: Optional[int] = None, aggregated_value_field: Optional[str] = None, aggregated_grouping_fields: Optional[str] = None, @@ -2671,8 +3437,8 @@ def __init__( self.limit = limit self.start_search_time = start_search_time self.end_search_time = end_search_time - self.source_table = None - self.azure_async_operation_id = None + self.source_table: Optional[str] = None + self.azure_async_operation_id: Optional[str] = None class SearchSchemaValue(_serialization.Model): @@ -2721,7 +3487,7 @@ def __init__( name: Optional[str] = None, display_name: Optional[str] = None, type: Optional[str] = None, - owner_type: Optional[List[str]] = None, + owner_type: Optional[list[str]] = None, **kwargs: Any ) -> None: """ @@ -2845,7 +3611,7 @@ class StorageInsight(ProxyResource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. 
:vartype name: str @@ -2889,9 +3655,9 @@ def __init__( self, *, e_tag: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - containers: Optional[List[str]] = None, - tables: Optional[List[str]] = None, + tags: Optional[dict[str, str]] = None, + containers: Optional[list[str]] = None, + tables: Optional[list[str]] = None, storage_account: Optional["_models.StorageAccount"] = None, **kwargs: Any ) -> None: @@ -2913,7 +3679,7 @@ def __init__( self.containers = containers self.tables = tables self.storage_account = storage_account - self.status = None + self.status: Optional["_models.StorageInsightStatus"] = None class StorageInsightListResult(_serialization.Model): @@ -2933,7 +3699,7 @@ class StorageInsightListResult(_serialization.Model): def __init__( self, *, - value: Optional[List["_models.StorageInsight"]] = None, + value: Optional[list["_models.StorageInsight"]] = None, odata_next_link: Optional[str] = None, **kwargs: Any ) -> None: @@ -2984,73 +3750,172 @@ def __init__( self.description = description -class SystemData(_serialization.Model): - """Read only system data. +class SummaryLogs(ProxyResourceAutoGenerated): + """Workspace data summary rules definition. - :ivar created_by: An identifier for the identity that created the resource. - :vartype created_by: str - :ivar created_by_type: The type of identity that created the resource. Known values are: - "user", "application", "managedIdentity", "key", "SystemAssigned", "UserAssigned", and "None". - :vartype created_by_type: str or ~azure.mgmt.loganalytics.models.IdentityType - :ivar created_at: The timestamp of resource creation (UTC). - :vartype created_at: ~datetime.datetime - :ivar last_modified_by: An identifier for the identity that last modified the resource. - :vartype last_modified_by: str - :ivar last_modified_by_type: The type of identity that last modified the resource. Known values - are: "user", "application", "managedIdentity", "key", "SystemAssigned", "UserAssigned", and - "None". 
- :vartype last_modified_by_type: str or ~azure.mgmt.loganalytics.models.IdentityType - :ivar last_modified_at: The timestamp of resource last modification (UTC). - :vartype last_modified_at: ~datetime.datetime + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData + :ivar rule_type: SummaryRules rule type: User. "User" + :vartype rule_type: str or ~azure.mgmt.loganalytics.models.RuleTypeEnum + :ivar display_name: The display name of the Summary rule. + :vartype display_name: str + :ivar description: The description of the Summary rule. + :vartype description: str + :ivar is_active: Indicates if Summary rule is active. If not, Summary rule execution stops. + :vartype is_active: bool + :ivar status_code: Indicates the reason for rule deactivation. Known values are: "UserAction" + and "DataPlaneError". + :vartype status_code: str or ~azure.mgmt.loganalytics.models.StatusCodeEnum + :ivar provisioning_state: Summary rule is in provisioning state. If set to 'updating' or + 'deleting', indicates a resource lock due to an ongoing operation, preventing any update to the + Summary rule until the operation is complete. Known values are: "Updating", "InProgress", + "Succeeded", and "Deleting". + :vartype provisioning_state: str or ~azure.mgmt.loganalytics.models.ProvisioningStateEnum + :ivar rule_definition: Rule definition parameters. 
+ :vartype rule_definition: ~azure.mgmt.loganalytics.models.RuleDefinition """ + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "is_active": {"readonly": True}, + "status_code": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + _attribute_map = { - "created_by": {"key": "createdBy", "type": "str"}, - "created_by_type": {"key": "createdByType", "type": "str"}, - "created_at": {"key": "createdAt", "type": "iso-8601"}, - "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, - "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, - "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "rule_type": {"key": "properties.ruleType", "type": "str"}, + "display_name": {"key": "properties.displayName", "type": "str"}, + "description": {"key": "properties.description", "type": "str"}, + "is_active": {"key": "properties.isActive", "type": "bool"}, + "status_code": {"key": "properties.statusCode", "type": "str"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "rule_definition": {"key": "properties.ruleDefinition", "type": "RuleDefinition"}, } def __init__( self, *, - created_by: Optional[str] = None, - created_by_type: Optional[Union[str, "_models.IdentityType"]] = None, - created_at: Optional[datetime.datetime] = None, - last_modified_by: Optional[str] = None, - last_modified_by_type: Optional[Union[str, "_models.IdentityType"]] = None, - last_modified_at: Optional[datetime.datetime] = None, + rule_type: Optional[Union[str, "_models.RuleTypeEnum"]] = None, + display_name: Optional[str] = None, + description: Optional[str] = None, + rule_definition: Optional["_models.RuleDefinition"] = None, **kwargs: Any ) -> 
None: """ - :keyword created_by: An identifier for the identity that created the resource. - :paramtype created_by: str - :keyword created_by_type: The type of identity that created the resource. Known values are: - "user", "application", "managedIdentity", "key", "SystemAssigned", "UserAssigned", and "None". - :paramtype created_by_type: str or ~azure.mgmt.loganalytics.models.IdentityType - :keyword created_at: The timestamp of resource creation (UTC). - :paramtype created_at: ~datetime.datetime - :keyword last_modified_by: An identifier for the identity that last modified the resource. - :paramtype last_modified_by: str - :keyword last_modified_by_type: The type of identity that last modified the resource. Known - values are: "user", "application", "managedIdentity", "key", "SystemAssigned", "UserAssigned", - and "None". - :paramtype last_modified_by_type: str or ~azure.mgmt.loganalytics.models.IdentityType - :keyword last_modified_at: The timestamp of resource last modification (UTC). - :paramtype last_modified_at: ~datetime.datetime + :keyword rule_type: SummaryRules rule type: User. "User" + :paramtype rule_type: str or ~azure.mgmt.loganalytics.models.RuleTypeEnum + :keyword display_name: The display name of the Summary rule. + :paramtype display_name: str + :keyword description: The description of the Summary rule. + :paramtype description: str + :keyword rule_definition: Rule definition parameters. 
+ :paramtype rule_definition: ~azure.mgmt.loganalytics.models.RuleDefinition """ super().__init__(**kwargs) - self.created_by = created_by - self.created_by_type = created_by_type - self.created_at = created_at - self.last_modified_by = last_modified_by - self.last_modified_by_type = last_modified_by_type - self.last_modified_at = last_modified_at + self.rule_type = rule_type + self.display_name = display_name + self.description = description + self.is_active: Optional[bool] = None + self.status_code: Optional[Union[str, "_models.StatusCodeEnum"]] = None + self.provisioning_state: Optional[Union[str, "_models.ProvisioningStateEnum"]] = None + self.rule_definition = rule_definition + + +class SummaryLogsListResult(_serialization.Model): + """The list Summary rule operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of Summary rules. + :vartype value: list[~azure.mgmt.loganalytics.models.SummaryLogs] + :ivar next_link: URL to retrieve the next set of operation list results, if available. + :vartype next_link: str + """ + + _validation = { + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[SummaryLogs]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, *, value: Optional[list["_models.SummaryLogs"]] = None, **kwargs: Any) -> None: + """ + :keyword value: A list of Summary rules. + :paramtype value: list[~azure.mgmt.loganalytics.models.SummaryLogs] + """ + super().__init__(**kwargs) + self.value = value + self.next_link: Optional[str] = None + + +class SummaryLogsRetryBin(_serialization.Model): + """Request to retry a summary logs bin. + + :ivar properties: Retry bin properties. 
+ :vartype properties: ~azure.mgmt.loganalytics.models.SummaryLogsRetryBinProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "SummaryLogsRetryBinProperties"}, + } + + def __init__(self, *, properties: Optional["_models.SummaryLogsRetryBinProperties"] = None, **kwargs: Any) -> None: + """ + :keyword properties: Retry bin properties. + :paramtype properties: ~azure.mgmt.loganalytics.models.SummaryLogsRetryBinProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class SummaryLogsRetryBinProperties(_serialization.Model): + """Properties for retrying a Summary rule bin. + + All required parameters must be populated in order to send to server. + + :ivar retry_bin_start_time: The time (UTC) of the bin to retry. Required. + :vartype retry_bin_start_time: ~datetime.datetime + """ + + _validation = { + "retry_bin_start_time": {"required": True}, + } + _attribute_map = { + "retry_bin_start_time": {"key": "retryBinStartTime", "type": "iso-8601"}, + } + + def __init__(self, *, retry_bin_start_time: datetime.datetime, **kwargs: Any) -> None: + """ + :keyword retry_bin_start_time: The time (UTC) of the bin to retry. Required. + :paramtype retry_bin_start_time: ~datetime.datetime + """ + super().__init__(**kwargs) + self.retry_bin_start_time = retry_bin_start_time -class SystemDataAutoGenerated(_serialization.Model): + +class SystemData(_serialization.Model): """Metadata pertaining to creation and last modification of the resource. :ivar created_by: The identity that created the resource. @@ -3114,13 +3979,13 @@ def __init__( self.last_modified_at = last_modified_at -class Table(ProxyResource): # pylint: disable=too-many-instance-attributes +class Table(ProxyResource): """Workspace data table definition. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. 
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -3128,14 +3993,15 @@ class Table(ProxyResource): # pylint: disable=too-many-instance-attributes "Microsoft.Storage/storageAccounts". :vartype type: str :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.loganalytics.models.SystemDataAutoGenerated - :ivar retention_in_days: The table retention in days, between 4 and 730. Setting this property - to -1 will default to the workspace retention. + :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData + :ivar retention_in_days: In Analytics table: the tables analytics retention in days, between 4 + and 730. Setting this property to -1 will default to the workspace retention. In Basic and + Auxiliary table: read only property. :vartype retention_in_days: int :ivar total_retention_in_days: The table total retention in days, between 4 and 4383. Setting - this property to -1 will default to table retention. + this property to -1 will default to retentionInDays. :vartype total_retention_in_days: int - :ivar archive_retention_in_days: The table data archive retention in days. Calculated as + :ivar archive_retention_in_days: The tables long-term retention in days. Calculated as (totalRetentionInDays-retentionInDays). :vartype archive_retention_in_days: int :ivar search_results: Parameters of the search job that initiated this table. @@ -3145,7 +4011,7 @@ class Table(ProxyResource): # pylint: disable=too-many-instance-attributes :ivar result_statistics: Search job execution statistics. 
:vartype result_statistics: ~azure.mgmt.loganalytics.models.ResultStatistics :ivar plan: Instruct the system how to handle and charge the logs ingested to this table. Known - values are: "Basic" and "Analytics". + values are: "Basic", "Analytics", and "Auxiliary". :vartype plan: str or ~azure.mgmt.loganalytics.models.TablePlanEnum :ivar last_plan_modified_date: The timestamp that table plan was last modified (UTC). :vartype last_plan_modified_date: str @@ -3183,7 +4049,7 @@ class Table(ProxyResource): # pylint: disable=too-many-instance-attributes "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, + "system_data": {"key": "systemData", "type": "SystemData"}, "retention_in_days": {"key": "properties.retentionInDays", "type": "int"}, "total_retention_in_days": {"key": "properties.totalRetentionInDays", "type": "int"}, "archive_retention_in_days": {"key": "properties.archiveRetentionInDays", "type": "int"}, @@ -3210,36 +4076,37 @@ def __init__( **kwargs: Any ) -> None: """ - :keyword retention_in_days: The table retention in days, between 4 and 730. Setting this - property to -1 will default to the workspace retention. + :keyword retention_in_days: In Analytics table: the tables analytics retention in days, between + 4 and 730. Setting this property to -1 will default to the workspace retention. In Basic and + Auxiliary table: read only property. :paramtype retention_in_days: int :keyword total_retention_in_days: The table total retention in days, between 4 and 4383. - Setting this property to -1 will default to table retention. + Setting this property to -1 will default to retentionInDays. :paramtype total_retention_in_days: int :keyword search_results: Parameters of the search job that initiated this table. 
:paramtype search_results: ~azure.mgmt.loganalytics.models.SearchResults :keyword restored_logs: Parameters of the restore operation that initiated this table. :paramtype restored_logs: ~azure.mgmt.loganalytics.models.RestoredLogs :keyword plan: Instruct the system how to handle and charge the logs ingested to this table. - Known values are: "Basic" and "Analytics". + Known values are: "Basic", "Analytics", and "Auxiliary". :paramtype plan: str or ~azure.mgmt.loganalytics.models.TablePlanEnum :keyword schema: Table schema. :paramtype schema: ~azure.mgmt.loganalytics.models.Schema """ super().__init__(**kwargs) - self.system_data = None + self.system_data: Optional["_models.SystemData"] = None self.retention_in_days = retention_in_days self.total_retention_in_days = total_retention_in_days - self.archive_retention_in_days = None + self.archive_retention_in_days: Optional[int] = None self.search_results = search_results self.restored_logs = restored_logs - self.result_statistics = None + self.result_statistics: Optional["_models.ResultStatistics"] = None self.plan = plan - self.last_plan_modified_date = None + self.last_plan_modified_date: Optional[str] = None self.schema = schema - self.provisioning_state = None - self.retention_in_days_as_default = None - self.total_retention_in_days_as_default = None + self.provisioning_state: Optional[Union[str, "_models.ProvisioningStateEnum"]] = None + self.retention_in_days_as_default: Optional[bool] = None + self.total_retention_in_days_as_default: Optional[bool] = None class TablesListResult(_serialization.Model): @@ -3253,7 +4120,7 @@ class TablesListResult(_serialization.Model): "value": {"key": "value", "type": "[Table]"}, } - def __init__(self, *, value: Optional[List["_models.Table"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.Table"]] = None, **kwargs: Any) -> None: """ :keyword value: A list of data tables. 
:paramtype value: list[~azure.mgmt.loganalytics.models.Table] @@ -3307,7 +4174,7 @@ class TagsResource(_serialization.Model): "tags": {"key": "tags", "type": "{str}"}, } - def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: + def __init__(self, *, tags: Optional[dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] @@ -3401,8 +4268,8 @@ class UserAssignedIdentity(_serialization.Model): def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.principal_id = None - self.client_id = None + self.principal_id: Optional[str] = None + self.client_id: Optional[str] = None class UserIdentityProperties(_serialization.Model): @@ -3429,11 +4296,11 @@ class UserIdentityProperties(_serialization.Model): def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.principal_id = None - self.client_id = None + self.principal_id: Optional[str] = None + self.client_id: Optional[str] = None -class Workspace(TrackedResource): # pylint: disable=too-many-instance-attributes +class Workspace(TrackedResource): """The top level Workspace resource container. Variables are only populated by the server, and will be ignored when sending a request. @@ -3441,7 +4308,7 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute All required parameters must be populated in order to send to server. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. 
:vartype name: str @@ -3455,7 +4322,7 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute :ivar identity: The identity of the resource. :vartype identity: ~azure.mgmt.loganalytics.models.Identity :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.loganalytics.models.SystemDataAutoGenerated + :vartype system_data: ~azure.mgmt.loganalytics.models.SystemData :ivar etag: The etag of the workspace. :vartype etag: str :ivar provisioning_state: The provisioning state of the workspace. Known values are: @@ -3473,15 +4340,15 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute :ivar workspace_capping: The daily volume cap for ingestion. :vartype workspace_capping: ~azure.mgmt.loganalytics.models.WorkspaceCapping :ivar created_date: Workspace creation date. - :vartype created_date: str + :vartype created_date: ~datetime.datetime :ivar modified_date: Workspace modification date. - :vartype modified_date: str + :vartype modified_date: ~datetime.datetime :ivar public_network_access_for_ingestion: The network access type for accessing Log Analytics - ingestion. Known values are: "Enabled" and "Disabled". + ingestion. Known values are: "Enabled", "Disabled", and "SecuredByPerimeter". :vartype public_network_access_for_ingestion: str or ~azure.mgmt.loganalytics.models.PublicNetworkAccessType :ivar public_network_access_for_query: The network access type for accessing Log Analytics - query. Known values are: "Enabled" and "Disabled". + query. Known values are: "Enabled", "Disabled", and "SecuredByPerimeter". 
:vartype public_network_access_for_query: str or ~azure.mgmt.loganalytics.models.PublicNetworkAccessType :ivar force_cmk_for_query: Indicates whether customer managed storage is mandatory for query @@ -3494,8 +4361,12 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute :vartype features: ~azure.mgmt.loganalytics.models.WorkspaceFeatures :ivar default_data_collection_rule_resource_id: The resource ID of the default Data Collection Rule to use for this workspace. Expected format is - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. :vartype default_data_collection_rule_resource_id: str + :ivar replication: workspace replication properties. + :vartype replication: ~azure.mgmt.loganalytics.models.WorkspaceReplicationProperties + :ivar failover: workspace failover properties. 
+ :vartype failover: ~azure.mgmt.loganalytics.models.WorkspaceFailoverProperties """ _validation = { @@ -3518,15 +4389,15 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute "tags": {"key": "tags", "type": "{str}"}, "location": {"key": "location", "type": "str"}, "identity": {"key": "identity", "type": "Identity"}, - "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, + "system_data": {"key": "systemData", "type": "SystemData"}, "etag": {"key": "etag", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "customer_id": {"key": "properties.customerId", "type": "str"}, "sku": {"key": "properties.sku", "type": "WorkspaceSku"}, "retention_in_days": {"key": "properties.retentionInDays", "type": "int"}, "workspace_capping": {"key": "properties.workspaceCapping", "type": "WorkspaceCapping"}, - "created_date": {"key": "properties.createdDate", "type": "str"}, - "modified_date": {"key": "properties.modifiedDate", "type": "str"}, + "created_date": {"key": "properties.createdDate", "type": "iso-8601"}, + "modified_date": {"key": "properties.modifiedDate", "type": "iso-8601"}, "public_network_access_for_ingestion": {"key": "properties.publicNetworkAccessForIngestion", "type": "str"}, "public_network_access_for_query": {"key": "properties.publicNetworkAccessForQuery", "type": "str"}, "force_cmk_for_query": {"key": "properties.forceCmkForQuery", "type": "bool"}, @@ -3539,13 +4410,15 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute "key": "properties.defaultDataCollectionRuleResourceId", "type": "str", }, + "replication": {"key": "properties.replication", "type": "WorkspaceReplicationProperties"}, + "failover": {"key": "properties.failover", "type": "WorkspaceFailoverProperties"}, } def __init__( self, *, location: str, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, identity: Optional["_models.Identity"] = None, etag: 
Optional[str] = None, sku: Optional["_models.WorkspaceSku"] = None, @@ -3556,6 +4429,8 @@ def __init__( force_cmk_for_query: Optional[bool] = None, features: Optional["_models.WorkspaceFeatures"] = None, default_data_collection_rule_resource_id: Optional[str] = None, + replication: Optional["_models.WorkspaceReplicationProperties"] = None, + failover: Optional["_models.WorkspaceFailoverProperties"] = None, **kwargs: Any ) -> None: """ @@ -3575,11 +4450,11 @@ def __init__( :keyword workspace_capping: The daily volume cap for ingestion. :paramtype workspace_capping: ~azure.mgmt.loganalytics.models.WorkspaceCapping :keyword public_network_access_for_ingestion: The network access type for accessing Log - Analytics ingestion. Known values are: "Enabled" and "Disabled". + Analytics ingestion. Known values are: "Enabled", "Disabled", and "SecuredByPerimeter". :paramtype public_network_access_for_ingestion: str or ~azure.mgmt.loganalytics.models.PublicNetworkAccessType :keyword public_network_access_for_query: The network access type for accessing Log Analytics - query. Known values are: "Enabled" and "Disabled". + query. Known values are: "Enabled", "Disabled", and "SecuredByPerimeter". :paramtype public_network_access_for_query: str or ~azure.mgmt.loganalytics.models.PublicNetworkAccessType :keyword force_cmk_for_query: Indicates whether customer managed storage is mandatory for query @@ -3589,26 +4464,32 @@ def __init__( :paramtype features: ~azure.mgmt.loganalytics.models.WorkspaceFeatures :keyword default_data_collection_rule_resource_id: The resource ID of the default Data Collection Rule to use for this workspace. Expected format is - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. 
:paramtype default_data_collection_rule_resource_id: str + :keyword replication: workspace replication properties. + :paramtype replication: ~azure.mgmt.loganalytics.models.WorkspaceReplicationProperties + :keyword failover: workspace failover properties. + :paramtype failover: ~azure.mgmt.loganalytics.models.WorkspaceFailoverProperties """ super().__init__(tags=tags, location=location, **kwargs) self.identity = identity - self.system_data = None + self.system_data: Optional["_models.SystemData"] = None self.etag = etag - self.provisioning_state = None - self.customer_id = None + self.provisioning_state: Optional[Union[str, "_models.WorkspaceEntityStatus"]] = None + self.customer_id: Optional[str] = None self.sku = sku self.retention_in_days = retention_in_days self.workspace_capping = workspace_capping - self.created_date = None - self.modified_date = None + self.created_date: Optional[datetime.datetime] = None + self.modified_date: Optional[datetime.datetime] = None self.public_network_access_for_ingestion = public_network_access_for_ingestion self.public_network_access_for_query = public_network_access_for_query self.force_cmk_for_query = force_cmk_for_query - self.private_link_scoped_resources = None + self.private_link_scoped_resources: Optional[list["_models.PrivateLinkScopedResource"]] = None self.features = features self.default_data_collection_rule_resource_id = default_data_collection_rule_resource_id + self.replication = replication + self.failover = failover class WorkspaceCapping(_serialization.Model): @@ -3644,13 +4525,44 @@ def __init__(self, *, daily_quota_gb: Optional[float] = None, **kwargs: Any) -> """ super().__init__(**kwargs) self.daily_quota_gb = daily_quota_gb - self.quota_next_reset_time = None - self.data_ingestion_status = None + self.quota_next_reset_time: Optional[str] = None + self.data_ingestion_status: Optional[Union[str, "_models.DataIngestionStatus"]] = None + + +class WorkspaceFailoverProperties(_serialization.Model): + """The 
failover state of the replication. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar state: The failover state of the replication. Known values are: "Inactive", "Activating", + "Active", "Deactivating", and "Failed". + :vartype state: str or ~azure.mgmt.loganalytics.models.WorkspaceFailoverState + :ivar last_modified_date: The last time when the failover state was updated. + :vartype last_modified_date: ~datetime.datetime + """ + + _validation = { + "state": {"readonly": True}, + "last_modified_date": {"readonly": True}, + } + + _attribute_map = { + "state": {"key": "state", "type": "str"}, + "last_modified_date": {"key": "lastModifiedDate", "type": "iso-8601"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.state: Optional[Union[str, "_models.WorkspaceFailoverState"]] = None + self.last_modified_date: Optional[datetime.datetime] = None class WorkspaceFeatures(_serialization.Model): """Workspace features. + Variables are only populated by the server, and will be ignored when sending a request. + :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. :vartype additional_properties: dict[str, any] @@ -3666,8 +4578,19 @@ class WorkspaceFeatures(_serialization.Model): :vartype cluster_resource_id: str :ivar disable_local_auth: Disable Non-AAD based Auth. :vartype disable_local_auth: bool + :ivar unified_sentinel_billing_only: An indication if the specify workspace is limited to + sentinel's unified billing model only. + :vartype unified_sentinel_billing_only: bool + :ivar associations: List of associations for the workspace. Indicates if the workspace is + associated with any of the following experiences: MDC, Sentinel, SentinelGraph, etc. 
+ :vartype associations: list[str] """ + _validation = { + "unified_sentinel_billing_only": {"readonly": True}, + "associations": {"readonly": True}, + } + _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "enable_data_export": {"key": "enableDataExport", "type": "bool"}, @@ -3678,12 +4601,14 @@ class WorkspaceFeatures(_serialization.Model): }, "cluster_resource_id": {"key": "clusterResourceId", "type": "str"}, "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + "unified_sentinel_billing_only": {"key": "unifiedSentinelBillingOnly", "type": "bool"}, + "associations": {"key": "associations", "type": "[str]"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, + additional_properties: Optional[dict[str, Any]] = None, enable_data_export: Optional[bool] = None, immediate_purge_data_on30_days: Optional[bool] = None, enable_log_access_using_only_resource_permissions: Optional[bool] = None, @@ -3715,6 +4640,8 @@ def __init__( self.enable_log_access_using_only_resource_permissions = enable_log_access_using_only_resource_permissions self.cluster_resource_id = cluster_resource_id self.disable_local_auth = disable_local_auth + self.unified_sentinel_billing_only: Optional[bool] = None + self.associations: Optional[list[str]] = None class WorkspaceListManagementGroupsResult(_serialization.Model): @@ -3728,7 +4655,7 @@ class WorkspaceListManagementGroupsResult(_serialization.Model): "value": {"key": "value", "type": "[ManagementGroup]"}, } - def __init__(self, *, value: Optional[List["_models.ManagementGroup"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.ManagementGroup"]] = None, **kwargs: Any) -> None: """ :keyword value: Gets or sets a list of management groups attached to the workspace. 
:paramtype value: list[~azure.mgmt.loganalytics.models.ManagementGroup] @@ -3748,7 +4675,7 @@ class WorkspaceListResult(_serialization.Model): "value": {"key": "value", "type": "[Workspace]"}, } - def __init__(self, *, value: Optional[List["_models.Workspace"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.Workspace"]] = None, **kwargs: Any) -> None: """ :keyword value: A list of workspaces. :paramtype value: list[~azure.mgmt.loganalytics.models.Workspace] @@ -3768,7 +4695,7 @@ class WorkspaceListUsagesResult(_serialization.Model): "value": {"key": "value", "type": "[UsageMetric]"}, } - def __init__(self, *, value: Optional[List["_models.UsageMetric"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[list["_models.UsageMetric"]] = None, **kwargs: Any) -> None: """ :keyword value: Gets or sets a list of usage metrics for a workspace. :paramtype value: list[~azure.mgmt.loganalytics.models.UsageMetric] @@ -3777,13 +4704,13 @@ def __init__(self, *, value: Optional[List["_models.UsageMetric"]] = None, **kwa self.value = value -class WorkspacePatch(AzureEntityResource): # pylint: disable=too-many-instance-attributes +class WorkspacePatch(AzureEntityResource): """The top level Workspace resource container. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -3811,15 +4738,15 @@ class WorkspacePatch(AzureEntityResource): # pylint: disable=too-many-instance- :ivar workspace_capping: The daily volume cap for ingestion. 
:vartype workspace_capping: ~azure.mgmt.loganalytics.models.WorkspaceCapping :ivar created_date: Workspace creation date. - :vartype created_date: str + :vartype created_date: ~datetime.datetime :ivar modified_date: Workspace modification date. - :vartype modified_date: str + :vartype modified_date: ~datetime.datetime :ivar public_network_access_for_ingestion: The network access type for accessing Log Analytics - ingestion. Known values are: "Enabled" and "Disabled". + ingestion. Known values are: "Enabled", "Disabled", and "SecuredByPerimeter". :vartype public_network_access_for_ingestion: str or ~azure.mgmt.loganalytics.models.PublicNetworkAccessType :ivar public_network_access_for_query: The network access type for accessing Log Analytics - query. Known values are: "Enabled" and "Disabled". + query. Known values are: "Enabled", "Disabled", and "SecuredByPerimeter". :vartype public_network_access_for_query: str or ~azure.mgmt.loganalytics.models.PublicNetworkAccessType :ivar force_cmk_for_query: Indicates whether customer managed storage is mandatory for query @@ -3832,8 +4759,12 @@ class WorkspacePatch(AzureEntityResource): # pylint: disable=too-many-instance- :vartype features: ~azure.mgmt.loganalytics.models.WorkspaceFeatures :ivar default_data_collection_rule_resource_id: The resource ID of the default Data Collection Rule to use for this workspace. Expected format is - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. :vartype default_data_collection_rule_resource_id: str + :ivar replication: workspace replication properties. + :vartype replication: ~azure.mgmt.loganalytics.models.WorkspaceReplicationProperties + :ivar failover: workspace failover properties. 
+ :vartype failover: ~azure.mgmt.loganalytics.models.WorkspaceFailoverProperties """ _validation = { @@ -3860,8 +4791,8 @@ class WorkspacePatch(AzureEntityResource): # pylint: disable=too-many-instance- "sku": {"key": "properties.sku", "type": "WorkspaceSku"}, "retention_in_days": {"key": "properties.retentionInDays", "type": "int"}, "workspace_capping": {"key": "properties.workspaceCapping", "type": "WorkspaceCapping"}, - "created_date": {"key": "properties.createdDate", "type": "str"}, - "modified_date": {"key": "properties.modifiedDate", "type": "str"}, + "created_date": {"key": "properties.createdDate", "type": "iso-8601"}, + "modified_date": {"key": "properties.modifiedDate", "type": "iso-8601"}, "public_network_access_for_ingestion": {"key": "properties.publicNetworkAccessForIngestion", "type": "str"}, "public_network_access_for_query": {"key": "properties.publicNetworkAccessForQuery", "type": "str"}, "force_cmk_for_query": {"key": "properties.forceCmkForQuery", "type": "bool"}, @@ -3874,13 +4805,15 @@ class WorkspacePatch(AzureEntityResource): # pylint: disable=too-many-instance- "key": "properties.defaultDataCollectionRuleResourceId", "type": "str", }, + "replication": {"key": "properties.replication", "type": "WorkspaceReplicationProperties"}, + "failover": {"key": "properties.failover", "type": "WorkspaceFailoverProperties"}, } def __init__( self, *, identity: Optional["_models.Identity"] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, sku: Optional["_models.WorkspaceSku"] = None, retention_in_days: Optional[int] = None, workspace_capping: Optional["_models.WorkspaceCapping"] = None, @@ -3889,6 +4822,8 @@ def __init__( force_cmk_for_query: Optional[bool] = None, features: Optional["_models.WorkspaceFeatures"] = None, default_data_collection_rule_resource_id: Optional[str] = None, + replication: Optional["_models.WorkspaceReplicationProperties"] = None, + failover: Optional["_models.WorkspaceFailoverProperties"] = 
None, **kwargs: Any ) -> None: """ @@ -3904,11 +4839,11 @@ def __init__( :keyword workspace_capping: The daily volume cap for ingestion. :paramtype workspace_capping: ~azure.mgmt.loganalytics.models.WorkspaceCapping :keyword public_network_access_for_ingestion: The network access type for accessing Log - Analytics ingestion. Known values are: "Enabled" and "Disabled". + Analytics ingestion. Known values are: "Enabled", "Disabled", and "SecuredByPerimeter". :paramtype public_network_access_for_ingestion: str or ~azure.mgmt.loganalytics.models.PublicNetworkAccessType :keyword public_network_access_for_query: The network access type for accessing Log Analytics - query. Known values are: "Enabled" and "Disabled". + query. Known values are: "Enabled", "Disabled", and "SecuredByPerimeter". :paramtype public_network_access_for_query: str or ~azure.mgmt.loganalytics.models.PublicNetworkAccessType :keyword force_cmk_for_query: Indicates whether customer managed storage is mandatory for query @@ -3918,25 +4853,31 @@ def __init__( :paramtype features: ~azure.mgmt.loganalytics.models.WorkspaceFeatures :keyword default_data_collection_rule_resource_id: The resource ID of the default Data Collection Rule to use for this workspace. Expected format is - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dcrName}. :paramtype default_data_collection_rule_resource_id: str + :keyword replication: workspace replication properties. + :paramtype replication: ~azure.mgmt.loganalytics.models.WorkspaceReplicationProperties + :keyword failover: workspace failover properties. 
+ :paramtype failover: ~azure.mgmt.loganalytics.models.WorkspaceFailoverProperties """ super().__init__(**kwargs) self.identity = identity self.tags = tags - self.provisioning_state = None - self.customer_id = None + self.provisioning_state: Optional[Union[str, "_models.WorkspaceEntityStatus"]] = None + self.customer_id: Optional[str] = None self.sku = sku self.retention_in_days = retention_in_days self.workspace_capping = workspace_capping - self.created_date = None - self.modified_date = None + self.created_date: Optional[datetime.datetime] = None + self.modified_date: Optional[datetime.datetime] = None self.public_network_access_for_ingestion = public_network_access_for_ingestion self.public_network_access_for_query = public_network_access_for_query self.force_cmk_for_query = force_cmk_for_query - self.private_link_scoped_resources = None + self.private_link_scoped_resources: Optional[list["_models.PrivateLinkScopedResource"]] = None self.features = features self.default_data_collection_rule_resource_id = default_data_collection_rule_resource_id + self.replication = replication + self.failover = failover class WorkspacePurgeBody(_serialization.Model): @@ -3961,7 +4902,7 @@ class WorkspacePurgeBody(_serialization.Model): "filters": {"key": "filters", "type": "[WorkspacePurgeBodyFilters]"}, } - def __init__(self, *, table: str, filters: List["_models.WorkspacePurgeBodyFilters"], **kwargs: Any) -> None: + def __init__(self, *, table: str, filters: list["_models.WorkspacePurgeBodyFilters"], **kwargs: Any) -> None: """ :keyword table: Table from which to purge data. Required. :paramtype table: str @@ -4084,6 +5025,108 @@ def __init__(self, *, status: Union[str, "_models.PurgeState"], **kwargs: Any) - self.status = status +class WorkspaceReplicationPatProperties(_serialization.Model): + """Workspace replication properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar location: The location of the replication. 
+ :vartype location: str + :ivar enabled: Specifies whether the replication is enabled or not. When true, workspace + configuration and data is replicated to the specified location. + :vartype enabled: bool + :ivar provisioning_state: The provisioning state of the replication. Known values are: + "Succeeded", "EnableRequested", "Enabling", "DisableRequested", "Disabling", + "RollbackRequested", "RollingBack", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.loganalytics.models.WorkspaceReplicationState + :ivar created_date: The last time when the replication was enabled. + :vartype created_date: ~datetime.datetime + :ivar last_modified_date: The last time when the replication was updated. + :vartype last_modified_date: ~datetime.datetime + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "created_date": {"readonly": True}, + "last_modified_date": {"readonly": True}, + } + + _attribute_map = { + "location": {"key": "location", "type": "str"}, + "enabled": {"key": "enabled", "type": "bool"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "created_date": {"key": "createdDate", "type": "iso-8601"}, + "last_modified_date": {"key": "lastModifiedDate", "type": "iso-8601"}, + } + + def __init__(self, *, location: Optional[str] = None, enabled: Optional[bool] = None, **kwargs: Any) -> None: + """ + :keyword location: The location of the replication. + :paramtype location: str + :keyword enabled: Specifies whether the replication is enabled or not. When true, workspace + configuration and data is replicated to the specified location. 
+ :paramtype enabled: bool + """ + super().__init__(**kwargs) + self.location = location + self.enabled = enabled + self.provisioning_state: Optional[Union[str, "_models.WorkspaceReplicationState"]] = None + self.created_date: Optional[datetime.datetime] = None + self.last_modified_date: Optional[datetime.datetime] = None + + +class WorkspaceReplicationProperties(_serialization.Model): + """Workspace replication properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar location: The location of the replication. + :vartype location: str + :ivar enabled: Specifies whether the replication is enabled or not. When true, workspace + configuration and data is replicated to the specified location. If replication is been enabled, + location must be provided. + :vartype enabled: bool + :ivar provisioning_state: The provisioning state of the replication. Known values are: + "Succeeded", "EnableRequested", "Enabling", "DisableRequested", "Disabling", + "RollbackRequested", "RollingBack", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.loganalytics.models.WorkspaceReplicationState + :ivar created_date: The last time when the replication was enabled. + :vartype created_date: ~datetime.datetime + :ivar last_modified_date: The last time when the replication was updated. 
+ :vartype last_modified_date: ~datetime.datetime + """ + + _validation = { + "provisioning_state": {"readonly": True}, + "created_date": {"readonly": True}, + "last_modified_date": {"readonly": True}, + } + + _attribute_map = { + "location": {"key": "location", "type": "str"}, + "enabled": {"key": "enabled", "type": "bool"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "created_date": {"key": "createdDate", "type": "iso-8601"}, + "last_modified_date": {"key": "lastModifiedDate", "type": "iso-8601"}, + } + + def __init__(self, *, location: Optional[str] = None, enabled: Optional[bool] = None, **kwargs: Any) -> None: + """ + :keyword location: The location of the replication. + :paramtype location: str + :keyword enabled: Specifies whether the replication is enabled or not. When true, workspace + configuration and data is replicated to the specified location. If replication is been enabled, + location must be provided. + :paramtype enabled: bool + """ + super().__init__(**kwargs) + self.location = location + self.enabled = enabled + self.provisioning_state: Optional[Union[str, "_models.WorkspaceReplicationState"]] = None + self.created_date: Optional[datetime.datetime] = None + self.last_modified_date: Optional[datetime.datetime] = None + + class WorkspaceSku(_serialization.Model): """The SKU (tier) of a workspace. @@ -4095,12 +5138,10 @@ class WorkspaceSku(_serialization.Model): "PerNode", "PerGB2018", "Standalone", "CapacityReservation", and "LACluster". :vartype name: str or ~azure.mgmt.loganalytics.models.WorkspaceSkuNameEnum :ivar capacity_reservation_level: The capacity reservation level in GB for this workspace, when - CapacityReservation sku is selected. Known values are: 100, 200, 300, 400, 500, 1000, 2000, and - 5000. - :vartype capacity_reservation_level: int or - ~azure.mgmt.loganalytics.models.CapacityReservationLevel + CapacityReservation sku is selected. 
+ :vartype capacity_reservation_level: int :ivar last_sku_update: The last time when the sku was updated. - :vartype last_sku_update: str + :vartype last_sku_update: ~datetime.datetime """ _validation = { @@ -4111,14 +5152,14 @@ class WorkspaceSku(_serialization.Model): _attribute_map = { "name": {"key": "name", "type": "str"}, "capacity_reservation_level": {"key": "capacityReservationLevel", "type": "int"}, - "last_sku_update": {"key": "lastSkuUpdate", "type": "str"}, + "last_sku_update": {"key": "lastSkuUpdate", "type": "iso-8601"}, } def __init__( self, *, name: Union[str, "_models.WorkspaceSkuNameEnum"], - capacity_reservation_level: Optional[Union[int, "_models.CapacityReservationLevel"]] = None, + capacity_reservation_level: Optional[int] = None, **kwargs: Any ) -> None: """ @@ -4126,12 +5167,10 @@ def __init__( "PerNode", "PerGB2018", "Standalone", "CapacityReservation", and "LACluster". :paramtype name: str or ~azure.mgmt.loganalytics.models.WorkspaceSkuNameEnum :keyword capacity_reservation_level: The capacity reservation level in GB for this workspace, - when CapacityReservation sku is selected. Known values are: 100, 200, 300, 400, 500, 1000, - 2000, and 5000. - :paramtype capacity_reservation_level: int or - ~azure.mgmt.loganalytics.models.CapacityReservationLevel + when CapacityReservation sku is selected. 
+ :paramtype capacity_reservation_level: int """ super().__init__(**kwargs) self.name = name self.capacity_reservation_level = capacity_reservation_level - self.last_sku_update = None + self.last_sku_update: Optional[datetime.datetime] = None diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py index 7364a7f5d3b9..501c3ecac6e4 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py @@ -5,57 +5,65 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._query_packs_operations import QueryPacksOperations -from ._queries_operations import QueriesOperations -from ._data_exports_operations import DataExportsOperations -from ._data_sources_operations import DataSourcesOperations -from ._intelligence_packs_operations import IntelligencePacksOperations -from ._linked_services_operations import LinkedServicesOperations -from ._linked_storage_accounts_operations import LinkedStorageAccountsOperations -from ._management_groups_operations import ManagementGroupsOperations -from ._operation_statuses_operations import OperationStatusesOperations -from ._shared_keys_operations import SharedKeysOperations -from ._usages_operations import UsagesOperations -from ._storage_insight_configs_operations import StorageInsightConfigsOperations -from ._saved_searches_operations import SavedSearchesOperations -from ._available_service_tiers_operations import AvailableServiceTiersOperations -from ._gateways_operations import GatewaysOperations -from ._schema_operations import SchemaOperations -from 
._workspace_purge_operations import WorkspacePurgeOperations -from ._clusters_operations import ClustersOperations -from ._operations import Operations -from ._workspaces_operations import WorkspacesOperations -from ._deleted_workspaces_operations import DeletedWorkspacesOperations -from ._tables_operations import TablesOperations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._available_service_tiers_operations import AvailableServiceTiersOperations # type: ignore +from ._clusters_operations import ClustersOperations # type: ignore +from ._data_exports_operations import DataExportsOperations # type: ignore +from ._data_sources_operations import DataSourcesOperations # type: ignore +from ._gateways_operations import GatewaysOperations # type: ignore +from ._intelligence_packs_operations import IntelligencePacksOperations # type: ignore +from ._linked_services_operations import LinkedServicesOperations # type: ignore +from ._linked_storage_accounts_operations import LinkedStorageAccountsOperations # type: ignore +from ._management_groups_operations import ManagementGroupsOperations # type: ignore +from ._operations import Operations # type: ignore +from ._operation_statuses_operations import OperationStatusesOperations # type: ignore +from ._queries_operations import QueriesOperations # type: ignore +from ._query_packs_operations import QueryPacksOperations # type: ignore +from ._saved_searches_operations import SavedSearchesOperations # type: ignore +from ._schema_operations import SchemaOperations # type: ignore +from ._shared_keys_operations import SharedKeysOperations # type: ignore +from ._storage_insight_configs_operations import StorageInsightConfigsOperations # type: ignore +from ._tables_operations import TablesOperations # type: ignore +from ._usages_operations import UsagesOperations # type: ignore +from ._workspace_purge_operations import WorkspacePurgeOperations # type: 
ignore +from ._workspaces_operations import WorkspacesOperations # type: ignore +from ._deleted_workspaces_operations import DeletedWorkspacesOperations # type: ignore +from ._summary_logs_operations import SummaryLogsOperations # type: ignore from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ - "QueryPacksOperations", - "QueriesOperations", + "AvailableServiceTiersOperations", + "ClustersOperations", "DataExportsOperations", "DataSourcesOperations", + "GatewaysOperations", "IntelligencePacksOperations", "LinkedServicesOperations", "LinkedStorageAccountsOperations", "ManagementGroupsOperations", + "Operations", "OperationStatusesOperations", - "SharedKeysOperations", - "UsagesOperations", - "StorageInsightConfigsOperations", + "QueriesOperations", + "QueryPacksOperations", "SavedSearchesOperations", - "AvailableServiceTiersOperations", - "GatewaysOperations", "SchemaOperations", + "SharedKeysOperations", + "StorageInsightConfigsOperations", + "TablesOperations", + "UsagesOperations", "WorkspacePurgeOperations", - "ClustersOperations", - "Operations", "WorkspacesOperations", "DeletedWorkspacesOperations", - "TablesOperations", + "SummaryLogsOperations", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_available_service_tiers_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_available_service_tiers_operations.py index f5a14985d4b7..6bf8a864c713 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_available_service_tiers_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_available_service_tiers_operations.py @@ -1,4 +1,4 
@@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +6,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, List, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +25,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -43,14 +42,14 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = 
kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/availableServiceTiers", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -89,12 +88,14 @@ class AvailableServiceTiersOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_workspace( @@ -111,7 +112,7 @@ def list_by_workspace( :rtype: list[~azure.mgmt.loganalytics.models.AvailableServiceTier] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -122,7 +123,7 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) cls: ClsType[List[_models.AvailableServiceTier]] = kwargs.pop("cls", None) _request = build_list_by_workspace_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_clusters_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_clusters_operations.py index 0bdd628d0595..646d8b0fdf18 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_clusters_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_clusters_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -30,14 +32,12 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -47,14 +47,14 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -77,7 +77,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -105,7 +105,7 @@ def 
build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -113,7 +113,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -148,14 +148,14 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -179,14 +179,14 @@ def build_get_request(resource_group_name: str, cluster_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -212,7 +212,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -220,7 +220,7 @@ def build_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -254,15 +254,17 @@ class ClustersOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") 
+ self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Cluster"]: + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Cluster"]: """Gets Log Analytics clusters in a resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -275,10 +277,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -299,7 +301,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url 
= self._client.format_url(_request.url) _request.method = "GET" return _request @@ -322,7 +335,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -330,7 +346,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.Cluster"]: + def list(self, **kwargs: Any) -> ItemPaged["_models.Cluster"]: """Gets the Log Analytics clusters in a subscription. :return: An iterator like instance of either Cluster or the result of cls(response) @@ -340,10 +356,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Cluster"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -363,7 +379,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -386,7 +413,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -396,7 +426,7 @@ def get_next(next_link=None): def _create_or_update_initial( self, resource_group_name: str, cluster_name: str, parameters: Union[_models.Cluster, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -407,7 +437,7 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) @@ -440,19 +470,29 @@ def _create_or_update_initial( response = pipeline_response.http_response - if response.status_code not in [200, 201, 202]: + if response.status_code not in [200, 202]: try: response.read() # Load the body in memory and close the socket except 
(StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore @@ -533,7 +573,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -578,7 +618,7 @@ def get_long_running_output(pipeline_response): ) def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -589,7 +629,7 @@ def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: 
_headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( @@ -616,7 +656,10 @@ def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -642,7 +685,7 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -692,7 +735,7 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo :rtype: ~azure.mgmt.loganalytics.models.Cluster :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -703,7 +746,7 @@ def get(self, 
resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) _request = build_get_request( @@ -725,7 +768,10 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("Cluster", pipeline_response.http_response) @@ -742,7 +788,7 @@ def _update_initial( parameters: Union[_models.ClusterPatch, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -753,7 +799,7 @@ def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) @@ -786,19 +832,29 @@ def _update_initial( response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code 
not in [200, 202]: try: response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore @@ -881,7 +937,7 @@ def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_exports_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_exports_operations.py index f7cb66c14785..d37f731a9fd4 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_exports_operations.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_exports_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -26,14 +28,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -45,14 +45,14 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -85,7 +85,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ 
-93,7 +93,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -136,14 +136,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -177,14 +177,14 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -224,17 +224,19 @@ class DataExportsOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> Iterable["_models.DataExport"]: + ) -> ItemPaged["_models.DataExport"]: """Lists the data export instances within a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -249,10 +251,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataExportListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -274,7 +276,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -297,7 +310,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -389,7 +405,7 @@ def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, 
Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -400,7 +416,7 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataExport] = kwargs.pop("cls", None) @@ -435,7 +451,10 @@ def create_or_update( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("DataExport", pipeline_response.http_response) @@ -462,7 +481,7 @@ def get( :rtype: ~azure.mgmt.loganalytics.models.DataExport :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -473,7 +492,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataExport] = kwargs.pop("cls", None) _request = build_get_request( @@ -496,7 +515,10 @@ def get( if response.status_code not in [200]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("DataExport", pipeline_response.http_response) @@ -523,7 +545,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -534,7 +556,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -557,7 +579,10 @@ def delete( # pylint: disable=inconsistent-return-statements if response.status_code not in [200, 404]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_sources_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_sources_operations.py index f022fbf95bf0..4794ed21e70d 100644 --- 
a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_sources_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_data_sources_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -26,14 +28,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -45,7 +45,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -53,7 +53,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -88,12 +88,12 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) # Construct URL _url = kwargs.pop( "template_url", 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -124,14 +124,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -171,14 +171,14 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -220,12 +220,14 @@ class 
DataSourcesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload def create_or_update( @@ -312,7 +314,7 @@ def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -323,7 +325,7 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataSource] = kwargs.pop("cls", None) @@ -384,7 +386,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -395,7 +397,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -440,7 +442,7 @@ def get( :rtype: ~azure.mgmt.loganalytics.models.DataSource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -451,7 +453,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataSource] = kwargs.pop("cls", None) _request = build_get_request( @@ -486,7 +488,7 @@ def get( @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, filter: str, skiptoken: Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.DataSource"]: + ) -> ItemPaged["_models.DataSource"]: """Gets the first page of data source instances in a workspace with the link to the next page. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -506,10 +508,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataSourceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -533,7 +535,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_deleted_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_deleted_workspaces_operations.py index edeb17269ab1..9c60b261919c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_deleted_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_deleted_workspaces_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +6,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -25,14 +27,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -42,13 +42,13 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", 
"/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/deletedWorkspaces" - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } @@ -68,14 +68,14 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/deletedWorkspaces", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -106,15 +106,17 @@ class DeletedWorkspacesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") 
@distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: + def list(self, **kwargs: Any) -> ItemPaged["_models.Workspace"]: """Gets recently deleted workspaces in a subscription, available for recovery. :return: An iterator like instance of either Workspace or the result of cls(response) @@ -124,10 +126,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -147,7 +149,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -170,7 +183,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + 
_models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -178,7 +194,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Workspace"]: + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Workspace"]: """Gets recently deleted workspaces in a resource group, available for recovery. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -191,10 +207,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -215,7 +231,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = 
self._client.format_url(_request.url) _request.method = "GET" return _request @@ -238,7 +265,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_gateways_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_gateways_operations.py index 1d5a4d78fdd9..679600e933ac 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_gateways_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_gateways_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +6,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +25,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -42,12 +41,12 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/gateways/{gatewayId}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -84,12 +83,14 @@ class GatewaysOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: 
LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -108,7 +109,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -119,7 +120,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_intelligence_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_intelligence_packs_operations.py index 0bfcf5191431..ec695ca62d0d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_intelligence_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_intelligence_packs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -6,9 +6,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, List, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +25,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -42,12 +41,12 @@ def build_disable_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Disable", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", 
max_length=90, min_length=1 @@ -77,12 +76,12 @@ def build_enable_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Enable", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -113,14 +112,14 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -159,12 +158,14 @@ class IntelligencePacksOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = 
input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def disable( # pylint: disable=inconsistent-return-statements @@ -183,7 +184,7 @@ def disable( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -194,7 +195,7 @@ def disable( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_disable_request( @@ -239,7 +240,7 @@ def enable( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -250,7 +251,7 @@ def enable( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_enable_request( @@ -292,7 +293,7 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> :rtype: list[~azure.mgmt.loganalytics.models.IntelligencePack] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -303,7 +304,7 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[List[_models.IntelligencePack]] = kwargs.pop("cls", None) _request = build_list_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_services_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_services_operations.py index 2997e59abacf..415f20d18c21 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_services_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_services_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -30,14 +32,12 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -49,7 +49,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -57,7 +57,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -93,14 +93,14 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -134,14 +134,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -175,14 +175,14 @@ 
def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -221,12 +221,14 @@ class LinkedServicesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") def _create_or_update_initial( self, @@ -236,7 +238,7 @@ def _create_or_update_initial( parameters: Union[_models.LinkedService, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 
404: ResourceNotFoundError, 409: ResourceExistsError, @@ -247,7 +249,7 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) @@ -387,7 +389,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -435,7 +437,7 @@ def get_long_running_output(pipeline_response): def _delete_initial( self, resource_group_name: str, workspace_name: str, linked_service_name: str, **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -446,7 +448,7 @@ def _delete_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( @@ -504,7 +506,7 @@ def 
begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -563,7 +565,7 @@ def get( :rtype: ~azure.mgmt.loganalytics.models.LinkedService :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -574,7 +576,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedService] = kwargs.pop("cls", None) _request = build_get_request( @@ -609,7 +611,7 @@ def get( @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> Iterable["_models.LinkedService"]: + ) -> ItemPaged["_models.LinkedService"]: """Gets the linked services instances in a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -624,10 +626,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedServiceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -649,7 +651,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_storage_accounts_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_storage_accounts_operations.py index c98231c49c0b..1fcf96dcd29d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_storage_accounts_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_linked_storage_accounts_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -26,14 +28,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -49,7 +49,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = 
_headers.pop("Accept", "application/json") @@ -57,7 +57,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -96,12 +96,12 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -136,14 +136,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", 
max_length=90, min_length=1 @@ -177,14 +177,14 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -223,12 +223,14 @@ class LinkedStorageAccountsOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload def create_or_update( @@ -323,7 +325,7 @@ def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: 
MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -334,7 +336,7 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedStorageAccountsResource] = kwargs.pop("cls", None) @@ -401,7 +403,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -412,7 +414,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -463,7 +465,7 @@ def get( :rtype: ~azure.mgmt.loganalytics.models.LinkedStorageAccountsResource :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -474,7 +476,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedStorageAccountsResource] = kwargs.pop("cls", None) _request = build_get_request( @@ -509,7 +511,7 @@ def get( @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> Iterable["_models.LinkedStorageAccountsResource"]: + ) -> ItemPaged["_models.LinkedStorageAccountsResource"]: """Gets all linked storage accounts associated with the specified workspace, storage accounts will be sorted by their data source type. @@ -527,10 +529,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedStorageAccountsListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -552,7 +554,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git 
a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_management_groups_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_management_groups_operations.py index 59f63d691f69..e7dee1e67aca 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_management_groups_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_management_groups_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +6,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -25,14 +27,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -44,14 +44,14 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/managementGroups", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -90,15 +90,19 @@ class ManagementGroupsOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = 
input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> Iterable["_models.ManagementGroup"]: + def list( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.ManagementGroup"]: """Gets a list of management groups connected to a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -113,10 +117,10 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListManagementGroupsResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -138,7 +142,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = 
HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operation_statuses_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operation_statuses_operations.py index a8afe095866c..d352ef67c7c9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operation_statuses_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operation_statuses_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +6,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +25,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -41,14 +40,14 @@ def build_get_request(location: str, async_operation_id: str, subscription_id: s _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/locations/{location}/operationStatuses/{asyncOperationId}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "location": _SERIALIZER.url("location", location, "str"), "asyncOperationId": _SERIALIZER.url("async_operation_id", async_operation_id, "str"), @@ -78,12 +77,14 @@ class OperationStatusesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else 
kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _models.OperationStatus: @@ -97,7 +98,7 @@ def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _models. :rtype: ~azure.mgmt.loganalytics.models.OperationStatus :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -108,7 +109,7 @@ def get(self, location: str, async_operation_id: str, **kwargs: Any) -> _models. 
_headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationStatus] = kwargs.pop("cls", None) _request = build_get_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operations.py index 3e71b8ff6954..6d05673bf99b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_operations.py @@ -1,4 +1,3 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -25,14 +26,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -42,7 +41,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,15 +68,17 @@ class Operations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else 
kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: + def list(self, **kwargs: Any) -> ItemPaged["_models.Operation"]: """Lists all of the available OperationalInsights Rest API operations. :return: An iterator like instance of either Operation or the result of cls(response) @@ -87,10 +88,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -109,7 +110,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -132,7 +144,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = 
self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_queries_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_queries_operations.py index 03499676d0f3..64874fbec64f 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_queries_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_queries_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -26,14 +28,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -52,14 +52,14 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -98,7 +98,7 @@ def build_search_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -106,7 +106,7 @@ def 
build_search_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/search", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -140,14 +140,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -174,7 +174,7 @@ def build_put_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -182,7 +182,7 @@ def build_put_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { 
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -211,7 +211,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -219,7 +219,7 @@ def build_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -248,14 +248,14 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}/queries/{id}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -288,12 +288,14 @@ class QueriesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, 
**kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -304,7 +306,7 @@ def list( include_body: Optional[bool] = None, skip_token: Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.LogAnalyticsQueryPackQuery"]: + ) -> ItemPaged["_models.LogAnalyticsQueryPackQuery"]: """Gets a list of Queries defined within a Log Analytics QueryPack. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -329,10 +331,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPackQueryListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -357,7 +359,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -380,7 +393,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -399,7 +415,7 @@ def search( *, content_type: str = "application/json", **kwargs: Any - ) -> Iterable["_models.LogAnalyticsQueryPackQuery"]: + ) -> 
ItemPaged["_models.LogAnalyticsQueryPackQuery"]: """Search a list of Queries defined within a Log Analytics QueryPack according to given search properties. @@ -442,7 +458,7 @@ def search( *, content_type: str = "application/json", **kwargs: Any - ) -> Iterable["_models.LogAnalyticsQueryPackQuery"]: + ) -> ItemPaged["_models.LogAnalyticsQueryPackQuery"]: """Search a list of Queries defined within a Log Analytics QueryPack according to given search properties. @@ -482,7 +498,7 @@ def search( include_body: Optional[bool] = None, skip_token: Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.LogAnalyticsQueryPackQuery"]: + ) -> ItemPaged["_models.LogAnalyticsQueryPackQuery"]: """Search a list of Queries defined within a Log Analytics QueryPack according to given search properties. @@ -513,11 +529,11 @@ def search( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQueryListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -552,7 +568,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -575,7 +602,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -599,7 +629,7 @@ def get( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -610,7 +640,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) _request = build_get_request( @@ -633,7 +663,10 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = 
self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) @@ -731,7 +764,7 @@ def put( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -742,7 +775,7 @@ def put( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) @@ -777,7 +810,10 @@ def put( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) @@ -875,7 +911,7 @@ def update( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPackQuery :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -886,7 +922,7 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPackQuery] = kwargs.pop("cls", None) @@ -921,7 +957,10 @@ def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPackQuery", pipeline_response.http_response) @@ -948,7 +987,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -959,7 +998,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -982,7 +1021,10 @@ def delete( # pylint: disable=inconsistent-return-statements if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + 
pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_query_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_query_packs_operations.py index 5c8352b33be0..d71183626dbe 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_query_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_query_packs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -26,14 +28,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -43,7 +43,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,14 +69,14 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -101,7 +101,7 @@ def 
build_create_or_update_without_name_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -109,7 +109,7 @@ def build_create_or_update_without_name_request( # pylint: disable=name-too-lon _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -136,14 +136,14 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -169,14 +169,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -202,7 +202,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -210,7 +210,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -238,7 +238,7 @@ def build_update_tags_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -246,7 +246,7 @@ def 
build_update_tags_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/queryPacks/{queryPackName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -280,15 +280,17 @@ class QueryPacksOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.LogAnalyticsQueryPack"]: + def list(self, **kwargs: Any) -> ItemPaged["_models.LogAnalyticsQueryPack"]: """Gets a list of all Log Analytics QueryPacks within a subscription. 
:return: An iterator like instance of either LogAnalyticsQueryPack or the result of @@ -299,10 +301,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.LogAnalyticsQueryPack"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPackListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -322,7 +324,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -345,7 +358,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -355,7 +371,7 @@ def get_next(next_link=None): 
@distributed_trace def list_by_resource_group( self, resource_group_name: str, **kwargs: Any - ) -> Iterable["_models.LogAnalyticsQueryPack"]: + ) -> ItemPaged["_models.LogAnalyticsQueryPack"]: """Gets a list of Log Analytics QueryPacks within a resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -369,10 +385,10 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPackListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -393,7 +409,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -416,7 +443,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = 
self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -497,7 +527,7 @@ def create_or_update_without_name( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -508,7 +538,7 @@ def create_or_update_without_name( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) @@ -541,7 +571,10 @@ def create_or_update_without_name( if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) @@ -566,7 +599,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -577,7 +610,7 @@ def delete( # 
pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -599,7 +632,10 @@ def delete( # pylint: disable=inconsistent-return-statements if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -618,7 +654,7 @@ def get(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -629,7 +665,7 @@ def get(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) _request = build_get_request( @@ -651,7 +687,10 @@ def get(self, resource_group_name: str, query_pack_name: str, **kwargs: Any) -> if response.status_code not in [200]: map_error(status_code=response.status_code, 
response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) @@ -744,7 +783,7 @@ def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -755,7 +794,7 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) @@ -789,7 +828,10 @@ def create_or_update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) @@ -875,7 +917,7 @@ def update_tags( :rtype: ~azure.mgmt.loganalytics.models.LogAnalyticsQueryPack :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, 
Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -886,7 +928,7 @@ def update_tags( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2019-09-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LogAnalyticsQueryPack] = kwargs.pop("cls", None) @@ -920,7 +962,10 @@ def update_tags( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("LogAnalyticsQueryPack", pipeline_response.http_response) diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_saved_searches_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_saved_searches_operations.py index 95ff8c154536..38e159fe52e1 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_saved_searches_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_saved_searches_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -6,10 +6,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -25,14 +26,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -43,12 +42,12 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", 
subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -79,7 +78,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -87,7 +86,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -123,14 +122,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -164,14 +163,14 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -210,12 +209,14 @@ class SavedSearchesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -234,7 +235,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -245,7 
+246,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -358,7 +359,7 @@ def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -369,7 +370,7 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SavedSearch] = kwargs.pop("cls", None) @@ -430,7 +431,7 @@ def get( :rtype: ~azure.mgmt.loganalytics.models.SavedSearch :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -441,7 +442,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SavedSearch] = kwargs.pop("cls", None) _request 
= build_get_request( @@ -488,7 +489,7 @@ def list_by_workspace( :rtype: ~azure.mgmt.loganalytics.models.SavedSearchesListResult :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -499,7 +500,7 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SavedSearchesListResult] = kwargs.pop("cls", None) _request = build_list_by_workspace_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_schema_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_schema_operations.py index 256ec008a860..5b1ca89cf3ef 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_schema_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_schema_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +6,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +25,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -43,14 +42,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/schema", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -89,12 +88,14 @@ class SchemaOperations: models = _models - def __init__(self, *args, 
**kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.SearchGetSchemaResponse: @@ -109,7 +110,7 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ :rtype: ~azure.mgmt.loganalytics.models.SearchGetSchemaResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -120,7 +121,7 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SearchGetSchemaResponse] = kwargs.pop("cls", None) _request = build_get_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_shared_keys_operations.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_shared_keys_operations.py index 46f987e354be..89b5a672af3b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_shared_keys_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_shared_keys_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +6,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -24,14 +25,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -43,14 +42,14 @@ def build_get_shared_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/sharedKeys", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -83,14 +82,14 @@ def build_regenerate_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/regenerateSharedKey", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -129,12 +128,14 @@ class SharedKeysOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def get_shared_keys(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.SharedKeys: @@ -149,7 +150,7 @@ def get_shared_keys(self, resource_group_name: str, workspace_name: str, **kwarg :rtype: ~azure.mgmt.loganalytics.models.SharedKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -160,7 +161,7 @@ def get_shared_keys(self, resource_group_name: str, workspace_name: str, **kwarg _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: 
str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SharedKeys] = kwargs.pop("cls", None) _request = build_get_shared_keys_request( @@ -205,7 +206,7 @@ def regenerate(self, resource_group_name: str, workspace_name: str, **kwargs: An :rtype: ~azure.mgmt.loganalytics.models.SharedKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -216,7 +217,7 @@ def regenerate(self, resource_group_name: str, workspace_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SharedKeys] = kwargs.pop("cls", None) _request = build_regenerate_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_storage_insight_configs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_storage_insight_configs_operations.py index 3ccf5387e0b1..8d07f633a671 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_storage_insight_configs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_storage_insight_configs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -26,14 +28,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -45,7 +45,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -53,7 +53,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -89,14 +89,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -129,12 +129,12 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -165,14 +165,14 @@ def build_list_by_workspace_request( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -211,12 +211,14 @@ class StorageInsightConfigsOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload def create_or_update( @@ -303,7 +305,7 @@ def create_or_update( :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 
409: ResourceExistsError, @@ -314,7 +316,7 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.StorageInsight] = kwargs.pop("cls", None) @@ -375,7 +377,7 @@ def get( :rtype: ~azure.mgmt.loganalytics.models.StorageInsight :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -386,7 +388,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StorageInsight] = kwargs.pop("cls", None) _request = build_get_request( @@ -435,7 +437,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -446,7 +448,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: 
ClsType[None] = kwargs.pop("cls", None) _request = build_delete_request( @@ -477,7 +479,7 @@ def delete( # pylint: disable=inconsistent-return-statements @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> Iterable["_models.StorageInsight"]: + ) -> ItemPaged["_models.StorageInsight"]: """Lists the storage insight instances within a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -492,10 +494,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StorageInsightListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -517,7 +519,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_summary_logs_operations.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_summary_logs_operations.py new file mode 100644 index 000000000000..bccdcd1afd88 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_summary_logs_operations.py @@ -0,0 +1,1243 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_by_workspace_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/summaryLogs", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, summary_logs_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/summaryLogs/{summaryLogsName}", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "summaryLogsName": _SERIALIZER.url("summary_logs_name", summary_logs_name, "str", pattern=r"[^/]+"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, summary_logs_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = 
_headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/summaryLogs/{summaryLogsName}", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "summaryLogsName": _SERIALIZER.url("summary_logs_name", summary_logs_name, "str", pattern=r"[^/]+"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, summary_logs_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/summaryLogs/{summaryLogsName}", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "summaryLogsName": _SERIALIZER.url("summary_logs_name", summary_logs_name, "str", pattern=r"[^/]+"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_start_request( + resource_group_name: str, workspace_name: str, summary_logs_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/summaryLogs/{summaryLogsName}/start", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "summaryLogsName": _SERIALIZER.url("summary_logs_name", summary_logs_name, "str", pattern=r"[^/]+"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + 
# Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_stop_request( + resource_group_name: str, workspace_name: str, summary_logs_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/summaryLogs/{summaryLogsName}/stop", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "summaryLogsName": _SERIALIZER.url("summary_logs_name", summary_logs_name, "str", pattern=r"[^/]+"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_retry_bin_request( + resource_group_name: str, workspace_name: str, summary_logs_name: str, subscription_id: str, **kwargs: Any +) -> 
HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/summaryLogs/{summaryLogsName}/retrybin", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "summaryLogsName": _SERIALIZER.url("summary_logs_name", summary_logs_name, "str", pattern=r"[^/]+"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class SummaryLogsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.loganalytics.LogAnalyticsManagementClient`'s + :attr:`summary_logs` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_by_workspace( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.SummaryLogs"]: + """Gets all summary rules for the specified Log Analytics workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :return: An iterator like instance of either SummaryLogs or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.SummaryLogs] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SummaryLogsListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + 
params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + def extract_data(pipeline_response): + deserialized = self._deserialize("SummaryLogsListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: Union[_models.SummaryLogs, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + 
} + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "SummaryLogs") + + _request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", 
response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: _models.SummaryLogs, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.SummaryLogs]: + """Creates or updates Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to update summary rules properties. Required. + :type parameters: ~azure.mgmt.loganalytics.models.SummaryLogs + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either SummaryLogs or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.SummaryLogs] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.SummaryLogs]: + """Creates or updates Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to update summary rules properties. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either SummaryLogs or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.SummaryLogs] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: Union[_models.SummaryLogs, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.SummaryLogs]: + """Creates or updates Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to update summary rules properties. Is either a + SummaryLogs type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.SummaryLogs or IO[bytes] + :return: An instance of LROPoller that returns either SummaryLogs or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.loganalytics.models.SummaryLogs] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.SummaryLogs] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = self._deserialize("SummaryLogs", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, 
lro_options={"final-state-via": "azure-async-operation"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.SummaryLogs].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.SummaryLogs]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> _models.SummaryLogs: + """Gets Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. 
+ :type summary_logs_name: str + :return: SummaryLogs or the result of cls(response) + :rtype: ~azure.mgmt.loganalytics.models.SummaryLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SummaryLogs] = kwargs.pop("cls", None) + + _request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("SummaryLogs", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _delete_initial( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: 
ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: 
str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Deletes Log Analytics workspace Summary rules. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + 
return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + def _start_initial( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_start_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + 
response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_start( + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Starts an inactive Summary rule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._start_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + 
kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def stop( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, summary_logs_name: str, **kwargs: Any + ) -> None: + """Stops an active Summary rule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. 
+ :type summary_logs_name: str + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_stop_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + def _retry_bin_initial( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: Union[_models.SummaryLogsRetryBin, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + 
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "SummaryLogsRetryBin") + + _request = build_retry_bin_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated2, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_retry_bin( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: _models.SummaryLogsRetryBin, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Retries a failed Summary rule bin. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to retry a Summary rule bin. Required. + :type parameters: ~azure.mgmt.loganalytics.models.SummaryLogsRetryBin + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_retry_bin( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Retries a failed Summary rule bin. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. 
+ :type summary_logs_name: str + :param parameters: The parameters required to retry a Summary rule bin. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_retry_bin( + self, + resource_group_name: str, + workspace_name: str, + summary_logs_name: str, + parameters: Union[_models.SummaryLogsRetryBin, IO[bytes]], + **kwargs: Any + ) -> LROPoller[None]: + """Retries a failed Summary rule bin. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param summary_logs_name: The name of the summary logs. Must not contain '/'. Required. + :type summary_logs_name: str + :param parameters: The parameters required to retry a Summary rule bin. Is either a + SummaryLogsRetryBin type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.loganalytics.models.SummaryLogsRetryBin or IO[bytes] + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._retry_bin_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + summary_logs_name=summary_logs_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, 
raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py index 289ff15d3bf1..2cfdbff18ce3 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_tables_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -30,14 +32,12 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -49,14 +49,14 @@ def build_list_by_workspace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -89,7 +89,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -97,7 
+97,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -111,7 +111,9 @@ def build_create_or_update_request( min_length=4, pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", ), - "tableName": _SERIALIZER.url("table_name", table_name, "str"), + "tableName": _SERIALIZER.url( + "table_name", table_name, "str", max_length=63, min_length=4, pattern=r"^[A-Za-z0-9-_]+$" + ), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -133,7 +135,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -141,7 +143,7 @@ def build_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -155,7 +157,9 @@ def build_update_request( min_length=4, pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", ), - "tableName": _SERIALIZER.url("table_name", table_name, "str"), + "tableName": _SERIALIZER.url( + "table_name", table_name, "str", 
max_length=63, min_length=4, pattern=r"^[A-Za-z0-9-_]+$" + ), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -177,14 +181,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -198,7 +202,9 @@ def build_get_request( min_length=4, pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", ), - "tableName": _SERIALIZER.url("table_name", table_name, "str"), + "tableName": _SERIALIZER.url( + "table_name", table_name, "str", max_length=63, min_length=4, pattern=r"^[A-Za-z0-9-_]+$" + ), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -218,14 +224,14 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { 
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -239,7 +245,9 @@ def build_delete_request( min_length=4, pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", ), - "tableName": _SERIALIZER.url("table_name", table_name, "str"), + "tableName": _SERIALIZER.url( + "table_name", table_name, "str", max_length=63, min_length=4, pattern=r"^[A-Za-z0-9-_]+$" + ), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -259,14 +267,14 @@ def build_migrate_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}/migrate", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -280,7 +288,9 @@ def build_migrate_request( min_length=4, pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", ), - "tableName": _SERIALIZER.url("table_name", table_name, "str"), + "tableName": _SERIALIZER.url( + "table_name", table_name, "str", max_length=63, min_length=4, pattern=r"^[A-Za-z0-9-_]+$" + ), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -300,14 +310,14 @@ def build_cancel_search_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}/cancelSearch", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( @@ -321,7 +331,9 @@ def build_cancel_search_request( min_length=4, pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", ), - "tableName": _SERIALIZER.url("table_name", table_name, "str"), + "tableName": _SERIALIZER.url( + "table_name", table_name, "str", max_length=63, min_length=4, pattern=r"^[A-Za-z0-9-_]+$" + ), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -347,17 +359,19 @@ class TablesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_workspace( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> Iterable["_models.Table"]: + ) -> ItemPaged["_models.Table"]: 
"""Gets all the tables for the specified Log Analytics workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -372,10 +386,10 @@ def list_by_workspace( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TablesListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -397,7 +411,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -420,7 +445,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -435,7 +463,7 @@ def 
_create_or_update_initial( parameters: Union[_models.Table, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -446,7 +474,7 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) @@ -486,7 +514,10 @@ def _create_or_update_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -584,7 +615,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -639,7 +670,7 @@ def _update_initial( parameters: 
Union[_models.Table, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -650,7 +681,7 @@ def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) @@ -690,7 +721,10 @@ def _update_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -788,7 +822,7 @@ def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -850,7 +884,7 @@ def get(self, resource_group_name: str, workspace_name: str, table_name: str, ** :rtype: 
~azure.mgmt.loganalytics.models.Table :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -861,7 +895,7 @@ def get(self, resource_group_name: str, workspace_name: str, table_name: str, ** _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Table] = kwargs.pop("cls", None) _request = build_get_request( @@ -884,7 +918,10 @@ def get(self, resource_group_name: str, workspace_name: str, table_name: str, ** if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("Table", pipeline_response.http_response) @@ -897,7 +934,7 @@ def get(self, resource_group_name: str, workspace_name: str, table_name: str, ** def _delete_initial( self, resource_group_name: str, workspace_name: str, table_name: str, **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -908,7 +945,7 @@ def _delete_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( @@ -936,13 +973,23 @@ def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore @@ -966,7 +1013,7 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -1024,7 +1071,7 @@ def migrate( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError, @@ -1035,7 +1082,7 @@ def migrate( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_migrate_request( @@ -1058,7 +1105,10 @@ def migrate( # pylint: disable=inconsistent-return-statements if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -1081,7 +1131,7 @@ def cancel_search( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1092,7 +1142,7 @@ def cancel_search( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) _request = build_cancel_search_request( @@ -1115,7 +1165,10 @@ def cancel_search( # pylint: disable=inconsistent-return-statements if response.status_code not in [200]: map_error(status_code=response.status_code, 
response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_usages_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_usages_operations.py index 8463ceba961f..f2e9df27c2bd 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_usages_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_usages_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,9 +6,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar +from collections.abc import MutableMapping +from typing import Any, Callable, Optional, TypeVar +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -25,14 +27,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -44,14 +44,14 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/usages", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -90,15 +90,17 @@ class UsagesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if 
input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> Iterable["_models.UsageMetric"]: + def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> ItemPaged["_models.UsageMetric"]: """Gets a list of usage metrics for a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -113,10 +115,10 @@ def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListUsagesResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -138,7 +140,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, 
_parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspace_purge_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspace_purge_operations.py index 55bb9b7f7dba..e8f7556ea6a2 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspace_purge_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspace_purge_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -25,14 +26,12 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -44,7 +43,7 @@ def build_purge_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -52,7 +51,7 @@ def build_purge_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/purge", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -87,14 +86,14 @@ def build_get_purge_status_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct 
URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/operations/{purgeId}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -134,12 +133,14 @@ class WorkspacePurgeOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload def purge( @@ -246,7 +247,7 @@ def purge( :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -257,7 +258,7 @@ def purge( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) 
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.WorkspacePurgeResponse] = kwargs.pop("cls", None) @@ -323,7 +324,7 @@ def get_purge_status( :rtype: ~azure.mgmt.loganalytics.models.WorkspacePurgeStatusResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -334,7 +335,7 @@ def get_purge_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-08-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspacePurgeStatusResponse] = kwargs.pop("cls", None) _request = build_get_purge_status_request( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspaces_operations.py index af941812437c..1fff1a2a811c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/_workspaces_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,10 +6,12 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload +import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -30,14 +32,12 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._serialization import Serializer +from .._configuration import LogAnalyticsManagementClientConfiguration +from .._utils.serialization import Deserializer, Serializer -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -47,7 +47,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -73,14 +73,14 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -105,7 +105,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -113,7 +113,7 @@ def build_create_or_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -148,14 +148,14 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -190,14 +190,14 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ -230,7 +230,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -238,7 +238,7 @@ def build_update_request( _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}", - ) # pylint: disable=line-too-long + ) path_format_arguments = { "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 @@ 
-267,6 +267,231 @@ def build_update_request( return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) +def build_failover_request( + resource_group_name: str, location: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/locations/{location}/workspaces/{workspaceName}/failover", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "location": _SERIALIZER.url("location", location, "str", min_length=1), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_failback_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/failback", + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_nsp_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/networkSecurityPerimeterConfigurations", + ) + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + 
pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_nsp_request( + resource_group_name: str, + workspace_name: str, + network_security_perimeter_configuration_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/networkSecurityPerimeterConfigurations/{networkSecurityPerimeterConfigurationName}", + ) + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "networkSecurityPerimeterConfigurationName": _SERIALIZER.url( + "network_security_perimeter_configuration_name", + network_security_perimeter_configuration_name, + "str", + max_length=512, + min_length=1, + pattern=r"^.*$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # 
Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_reconcile_nsp_request( + resource_group_name: str, + workspace_name: str, + network_security_perimeter_configuration_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/networkSecurityPerimeterConfigurations/{networkSecurityPerimeterConfigurationName}/reconcile", + ) + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", + workspace_name, + "str", + max_length=63, + min_length=4, + pattern=r"^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$", + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "networkSecurityPerimeterConfigurationName": _SERIALIZER.url( + "network_security_perimeter_configuration_name", + network_security_perimeter_configuration_name, + "str", + max_length=512, + min_length=1, + pattern=r"^.*$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + class WorkspacesOperations: """ .. warning:: @@ -279,15 +504,17 @@ class WorkspacesOperations: models = _models - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: LogAnalyticsManagementClientConfiguration = ( + input_args.pop(0) if input_args else kwargs.pop("config") + ) + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: + def list(self, **kwargs: Any) -> ItemPaged["_models.Workspace"]: """Gets the workspaces in a subscription. 
:return: An iterator like instance of either Workspace or the result of cls(response) @@ -297,10 +524,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Workspace"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -320,7 +547,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -343,7 +581,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -351,7 +592,7 @@ def get_next(next_link=None): return 
ItemPaged(get_next, extract_data) @distributed_trace - def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Workspace"]: + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.Workspace"]: """Gets workspaces in a resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -364,10 +605,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -388,7 +629,18 @@ def prepare_request(next_link=None): _request.url = self._client.format_url(_request.url) else: - _request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -411,7 +663,10 @@ def get_next(next_link=None): if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -425,7 +680,7 @@ def _create_or_update_initial( parameters: Union[_models.Workspace, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -436,7 +691,7 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) @@ -475,7 +730,10 @@ def _create_or_update_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -564,7 +822,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -611,7 +869,7 @@ def get_long_running_output(pipeline_response): def _delete_initial( self, resource_group_name: str, workspace_name: str, force: Optional[bool] = None, **kwargs: Any ) -> Iterator[bytes]: - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -622,7 +880,7 @@ def _delete_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( @@ -650,7 +908,10 @@ def _delete_initial( except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) @@ -684,7 +945,7 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = 
kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -735,7 +996,7 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -746,7 +1007,7 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) _request = build_get_request( @@ -768,7 +1029,10 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("Workspace", pipeline_response.http_response) @@ -854,7 +1118,7 @@ def update( :rtype: ~azure.mgmt.loganalytics.models.Workspace :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -865,7 +1129,7 @@ def update( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) @@ -899,7 +1163,10 @@ def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize("Workspace", pipeline_response.http_response) @@ -908,3 +1175,525 @@ def update( return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + + def _failover_initial( + self, resource_group_name: str, location: str, workspace_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_failover_request( + resource_group_name=resource_group_name, + location=location, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = 
self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_failover( + self, resource_group_name: str, location: str, workspace_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Activates failover for the specified workspace. + + The specified replication location must match the location of the enabled replication for this + workspace. The failover operation is asynchronous and can take up to 30 minutes to complete. + The status of the operation can be checked using the operationId returned in the response. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param location: The name of the Azure region. Required. + :type location: str + :param workspace_name: The name of the workspace. Required. 
+ :type workspace_name: str + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._failover_initial( + resource_group_name=resource_group_name, + location=location, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + def _failback_initial(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: 
ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_failback_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponseAutoGenerated, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["azure-asyncoperation"] = self._deserialize( + "str", response.headers.get("azure-asyncoperation") + ) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_failback(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: + """Deactivates failover for the specified 
workspace. + + The failback operation is asynchronous and can take up to 30 minutes to complete. The status of + the operation can be checked using the operationId returned in the response. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._failback_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_nsp( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.NetworkSecurityPerimeterConfiguration"]: + """Gets a list of NSP configurations for specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :return: An iterator like instance of either NetworkSecurityPerimeterConfiguration or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfiguration] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.NetworkSecurityPerimeterConfigurationListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_nsp_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) 
for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + def extract_data(pipeline_response): + deserialized = self._deserialize("NetworkSecurityPerimeterConfigurationListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_nsp( + self, + resource_group_name: str, + workspace_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> _models.NetworkSecurityPerimeterConfiguration: + """Gets a network security perimeter configuration. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param network_security_perimeter_configuration_name: The name for a network security perimeter + configuration. Required. 
+ :type network_security_perimeter_configuration_name: str + :return: NetworkSecurityPerimeterConfiguration or the result of cls(response) + :rtype: ~azure.mgmt.loganalytics.models.NetworkSecurityPerimeterConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.NetworkSecurityPerimeterConfiguration] = kwargs.pop("cls", None) + + _request = build_get_nsp_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("NetworkSecurityPerimeterConfiguration", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _reconcile_nsp_initial( + self, + resource_group_name: str, + 
workspace_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_reconcile_nsp_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize( + _models.ErrorResponse, + pipeline_response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + 
return deserialized # type: ignore + + @distributed_trace + def begin_reconcile_nsp( + self, + resource_group_name: str, + workspace_name: str, + network_security_perimeter_configuration_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Reconcile network security perimeter configuration for Workspace resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param network_security_perimeter_configuration_name: The name for a network security perimeter + configuration. Required. + :type network_security_perimeter_configuration_name: str + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._reconcile_nsp_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + network_security_perimeter_configuration_name=network_security_perimeter_configuration_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + 
if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_create.py index 3d9027cdcd17..4591c4e9605b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_create.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -34,14 +35,14 @@ def main(): resource_group_name="oiautorest6685", cluster_name="oiautorest6685", parameters={ - "location": "australiasoutheast", - "sku": {"capacity": 1000, "name": "CapacityReservation"}, + "location": "eastus", + "sku": {"capacity": 100, "name": "CapacityReservation"}, "tags": {"tag1": "val1"}, }, ).result() print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersCreate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/ClustersCreate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_delete.py index f6386e52f64b..07bfdcc1ab58 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -36,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/ClustersDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_get.py index 98971c38db92..3d243f8bfb57 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/ClustersGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_list_by_resource_group.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_list_by_resource_group.py index 38cf9b85abcc..dd1f9c2e79ce 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_list_by_resource_group.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_list_by_resource_group.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright 
(c) Microsoft Corporation. All rights reserved. @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersListByResourceGroup.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/ClustersListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_subscription_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_subscription_list.py index 0bb8cf14db8a..3a5e23914bb7 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_subscription_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_subscription_list.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -35,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersSubscriptionList.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/ClustersSubscriptionList.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_update.py index b5274f3a2b8d..28883a2a5726 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/clusters_update.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -55,6 +56,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/ClustersUpdate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/ClustersUpdate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_create_or_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_create_or_update.py index 7b53bdaaca6f..a17c0308fac8 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_create_or_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_create_or_update.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -46,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataExportCreateOrUpdate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/DataExportCreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_delete.py index 46c8b5ec5279..1b22fb241370 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataExportDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/DataExportDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_get.py index 88ccaa7e568c..a14dd605ccef 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataExportGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/DataExportGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_list_by_workspace.py index f73fa3feebea..98b90ccd15be 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_export_list_by_workspace.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright 
(c) Microsoft Corporation. All rights reserved. @@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataExportListByWorkspace.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/DataExportListByWorkspace.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_create.py index e84362f87c38..5578d0cb04f9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_create.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -44,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataSourcesCreate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/DataSourcesCreate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_delete.py index 32d06490844e..e5fc333aeb3a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataSourcesDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/DataSourcesDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_get.py index 358dacaa37c6..99c98f379352 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataSourcesGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/DataSourcesGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_list_by_workspace.py index 370b11e7cc3b..1ae65471cfe0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/data_sources_list_by_workspace.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- 
# Copyright (c) Microsoft Corporation. All rights reserved. @@ -39,6 +40,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/DataSourcesListByWorkspace.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/DataSourcesListByWorkspace.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_create.py index ca45e507a7ec..c8a06afeae55 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_create.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -43,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedServicesCreate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/LinkedServicesCreate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_delete.py index 30a5412f442e..0f919c2305a2 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedServicesDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/LinkedServicesDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_get.py index b7df436bc6c6..54aa5d605ebe 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedServicesGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/LinkedServicesGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_list_by_workspace.py index ec254154bed7..19c3a323d41b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_services_list_by_workspace.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedServicesListByWorkspace.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/LinkedServicesListByWorkspace.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_create.py index a4975dd5a093..135f7c3fbf5d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_create.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -46,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedStorageAccountsCreate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/LinkedStorageAccountsCreate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_delete.py index b512fca2adfd..9bd755d169bc 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedStorageAccountsDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/LinkedStorageAccountsDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_get.py index 91149cc477da..3208331614de 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedStorageAccountsGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/LinkedStorageAccountsGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_list_by_workspace.py index cb171feefe26..dd214d922b18 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/linked_storage_accounts_list_by_workspace.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/LinkedStorageAccountsListByWorkspace.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/LinkedStorageAccountsListByWorkspace.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_get.py new file mode 100644 index 000000000000..df5c2e852202 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_get.py @@ -0,0 +1,44 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python nsp_for_workspaces_get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.workspaces.get_nsp( + resource_group_name="exampleRG", + workspace_name="someWorkspace", + network_security_perimeter_configuration_name="somePerimeterConfiguration", + ) + print(response) + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/NSPForWorkspaces_Get.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_list.py new file mode 100644 index 000000000000..0da688512196 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_list.py @@ -0,0 +1,44 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python nsp_for_workspaces_list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.workspaces.list_nsp( + resource_group_name="exampleRG", + workspace_name="someWorkspace", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/NSPForWorkspaces_List.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_reconcile.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_reconcile.py new file mode 100644 index 000000000000..a7cc98d97a9b --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/nsp_for_workspaces_reconcile.py @@ -0,0 +1,43 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python nsp_for_workspaces_reconcile.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.workspaces.begin_reconcile_nsp( + resource_group_name="exampleRG", + workspace_name="someWorkspace", + network_security_perimeter_configuration_name="somePerimeterConfiguration", + ).result() + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/NSPForWorkspaces_Reconcile.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operation_statuses_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operation_statuses_get.py index ca75de32507f..6e24a725795c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operation_statuses_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operation_statuses_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/OperationStatusesGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/OperationStatusesGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operations_list_by_tenant.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operations_list_by_tenant.py index 1a675593e69d..eda5dbad78d7 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operations_list_by_tenant.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/operations_list_by_tenant.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -35,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/OperationsListByTenant.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/OperationsListByTenant.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_delete.py index aa7d99adf1ba..9c135a1f4520 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/QueryPackQueriesDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_get.py index 7cc2450995f0..b91781dbf62c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/QueryPackQueriesGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_list.py index 689351a85ad4..cb00b5a27dbc 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_pack_queries_list.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesList.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/QueryPackQueriesList.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_delete.py index a19fc00230d5..19175160f02c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -36,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPacksDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/QueryPacksDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_get.py index 66fe5278a2ef..cb97b086d9a8 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights reserved. @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPacksGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/QueryPacksGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list.py index e51ff5f26d01..3fea7a0e353c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -35,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPacksList.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/QueryPacksList.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list_by_resource_group.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list_by_resource_group.py index ba092ff61dd6..0c1ecfcba28b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list_by_resource_group.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/query_packs_list_by_resource_group.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPacksListByResourceGroup.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/QueryPacksListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_get_schema.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_get_schema.py index edbd2af2b017..a5360d259a05 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_get_schema.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_get_schema.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/SavedSearchesGetSchema.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SavedSearchesGetSchema.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_list_by_workspace.py index 34401c2cdfe2..a2a1fb227903 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/saved_searches_list_by_workspace.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/SavedSearchesListByWorkspace.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SavedSearchesListByWorkspace.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_create_or_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_create_or_update.py index e3f5998755fc..c3fb9975efc2 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_create_or_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_create_or_update.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -48,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/StorageInsightsCreateOrUpdate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/StorageInsightsCreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_delete.py index 29ba98854692..b2d8da467265 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/StorageInsightsDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/StorageInsightsDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_get.py index cb700ea8c6d2..53a40c1464a6 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/StorageInsightsGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/StorageInsightsGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_list_by_workspace.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_list_by_workspace.py index d0408e4b7899..65d65aad99d8 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_list_by_workspace.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/storage_insights_list_by_workspace.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/StorageInsightsListByWorkspace.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/StorageInsightsListByWorkspace.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_delete.py new file mode 100644 index 000000000000..66be3073bfc2 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_delete.py @@ -0,0 +1,43 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python summary_logs_delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + client.summary_logs.begin_delete( + resource_group_name="oiautorest6685", + workspace_name="oiautorest6685", + summary_logs_name="summarylogs1", + ).result() + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SummaryLogsDelete.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_get.py new file mode 100644 index 000000000000..531b95526809 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_get.py @@ -0,0 +1,44 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python summary_logs_get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + response = client.summary_logs.get( + resource_group_name="oiautorest6685", + workspace_name="oiautorest6685", + summary_logs_name="summarylogs1", + ) + print(response) + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SummaryLogsGet.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_list.py new file mode 100644 index 000000000000..fac1a6d1910f --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_list.py @@ -0,0 +1,44 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python summary_logs_list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + response = client.summary_logs.list_by_workspace( + resource_group_name="oiautorest6685", + workspace_name="oiautorest6685", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SummaryLogsList.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_retry_bin.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_retry_bin.py new file mode 100644 index 000000000000..b06c94f1f512 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_retry_bin.py @@ -0,0 +1,44 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python summary_logs_retry_bin.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + client.summary_logs.begin_retry_bin( + resource_group_name="oiautorest6685", + workspace_name="oiautorest6685", + summary_logs_name="summarylogs1", + parameters={"properties": {"retryBinStartTime": "2020-02-03T04:00:00Z"}}, + ).result() + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SummaryLogsRetryBin.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_start.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_start.py new file mode 100644 index 000000000000..cda3d1ed40f3 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_start.py @@ -0,0 +1,43 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python summary_logs_start.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + client.summary_logs.begin_start( + resource_group_name="exampleresourcegroup", + workspace_name="exampleworkspace", + summary_logs_name="summarylogs3", + ).result() + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SummaryLogsStart.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_stop.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_stop.py new file mode 100644 index 000000000000..fd706365d77d --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_stop.py @@ -0,0 +1,43 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright 
(c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python summary_logs_stop.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + client.summary_logs.stop( + resource_group_name="oiautorest6685", + workspace_name="oiautorest6685", + summary_logs_name="summarylogs1", + ) + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SummaryLogsStop.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_upsert.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_upsert.py new file mode 100644 index 000000000000..eeb000228ac1 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/summary_logs_upsert.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python summary_logs_upsert.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + response = client.summary_logs.begin_create_or_update( + resource_group_name="oiautorest6685", + workspace_name="oiautorest6685", + summary_logs_name="summarylogs1", + parameters={ + "properties": { + "ruleDefinition": { + "binDelay": 10, + "binSize": 180, + "binStartTime": "2020-02-03T04:05:06Z", + "query": "MyTable_CL", + }, + "ruleType": "User", + } + }, + ).result() + print(response) + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/SummaryLogsUpsert.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_delete.py index e0bfe4656fa6..8113700a0c67 100644 --- 
a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -37,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/TablesDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_get.py index 035130a6d7ec..c6f7e0fd673d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/TablesGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_list.py index d9debdcf1d2a..c32c68cbe123 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_list.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesList.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/TablesList.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_migrate.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_migrate.py index 47958c7c1bcb..85c1f13bf49b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_migrate.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_migrate.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesMigrate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/TablesMigrate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_search_cancel.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_search_cancel.py index e4d548d6240d..e5bda075ac55 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_search_cancel.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_search_cancel.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesSearchCancel.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/TablesSearchCancel.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_upsert.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_upsert.py index ec01780d1520..1ef3e468f222 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_upsert.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/tables_upsert.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -30,7 +31,7 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.tables.begin_update( + response = client.tables.begin_create_or_update( resource_group_name="oiautorest6685", workspace_name="oiautorest6685", table_name="AzureNetworkFlow", @@ -45,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/TablesUpsert.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/TablesUpsert.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_available_service_tiers.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_available_service_tiers.py index 2dd225845d6c..0ecc94ffcabe 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_available_service_tiers.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_available_service_tiers.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesAvailableServiceTiers.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesAvailableServiceTiers.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_create.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_create.py index 22fb133340a8..75ec11214bff 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_create.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_create.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -42,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/WorkspacesCreate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesCreate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete.py index 11b3d0ee7e07..bd9562d0ef82 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -36,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/WorkspacesDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete_saved_searches.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete_saved_searches.py index 8b9fd9c84200..fbc458ea9723 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete_saved_searches.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_delete_saved_searches.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesDeleteSavedSearches.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesDeleteSavedSearches.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_disable_intelligence_pack.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_disable_intelligence_pack.py index 77948284f902..2ed72c658988 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_disable_intelligence_pack.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_disable_intelligence_pack.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesDisableIntelligencePack.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesDisableIntelligencePack.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_enable_intelligence_pack.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_enable_intelligence_pack.py index f19ec4f6cb0f..3712746d8111 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_enable_intelligence_pack.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_enable_intelligence_pack.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesEnableIntelligencePack.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesEnableIntelligencePack.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_failback.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_failback.py new file mode 100644 index 000000000000..c828113be8a5 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_failback.py @@ -0,0 +1,42 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python workspaces_failback.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="53bc36c5-91e1-4d09-92c9-63b89e571926", + ) + + client.workspaces.begin_failback( + resource_group_name="oiautorest6685", + workspace_name="oiautorest6685", + ).result() + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesFailback.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_failover.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_failover.py new file mode 100644 index 000000000000..645498178a58 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_failover.py @@ -0,0 +1,43 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-loganalytics +# USAGE + python workspaces_failover.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LogAnalyticsManagementClient( + credential=DefaultAzureCredential(), + subscription_id="53bc36c5-91e1-4d09-92c9-63b89e571926", + ) + + client.workspaces.begin_failover( + resource_group_name="oiautorest6685", + location="eastus", + workspace_name="oiautorest6685", + ).result() + + +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesFailover.json +if __name__ == "__main__": + main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_gateways_delete.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_gateways_delete.py index 516b776c582c..4562b5f4062d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_gateways_delete.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_gateways_delete.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): ) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesGatewaysDelete.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesGatewaysDelete.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get.py index 658a7fbd4fd0..b57a806e126b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/WorkspacesGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get_shared_keys.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get_shared_keys.py index 08e6a242c781..d2a458b46559 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get_shared_keys.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_get_shared_keys.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesGetSharedKeys.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesGetSharedKeys.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_by_resource_group.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_by_resource_group.py index b71d980c3e4e..6bab16fa88a0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_by_resource_group.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_by_resource_group.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/WorkspacesListByResourceGroup.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_intelligence_packs.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_intelligence_packs.py index 3ed8bf4c2e7b..8d36fefe7bd7 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_intelligence_packs.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_intelligence_packs.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesListIntelligencePacks.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesListIntelligencePacks.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_management_groups.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_management_groups.py index 2b4f496106c1..e6c3a9d5e78a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_management_groups.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_management_groups.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesListManagementGroups.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesListManagementGroups.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_usages.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_usages.py index 80d12940f217..09b3e65f8232 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_usages.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_list_usages.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesListUsages.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesListUsages.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge.py index 5f2d589a4aca..1159a4220543 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -41,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesPurge.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesPurge.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_operation.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_operation.py index 845dc6de3fa0..dbb5cb7f192b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_operation.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_operation.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesPurgeOperation.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesPurgeOperation.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_resource_id.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_resource_id.py index 7a47452ca322..89a706bfbd7c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_resource_id.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_purge_resource_id.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -47,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesPurgeResourceId.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesPurgeResourceId.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_regenerate_shared_keys.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_regenerate_shared_keys.py index 75b952d527df..505a33eb5fef 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_regenerate_shared_keys.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_regenerate_shared_keys.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesRegenerateSharedKeys.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesRegenerateSharedKeys.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_create_or_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_create_or_update.py index d254f65b0972..e32d43b23577 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_create_or_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_create_or_update.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -49,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesSavedSearchesCreateOrUpdate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesSavedSearchesCreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_get.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_get.py index fa6cb56271d0..dfd6119affc4 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_get.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_saved_searches_get.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2020-08-01/examples/WorkspacesSavedSearchesGet.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesSavedSearchesGet.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_subscription_list.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_subscription_list.py index 251fd49f9799..0b4e380af910 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_subscription_list.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_subscription_list.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -30,11 +31,11 @@ def main(): subscription_id="00000000-0000-0000-0000-00000000000", ) - response = client.deleted_workspaces.list() + response = client.workspaces.list() for item in response: print(item) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/WorkspacesSubscriptionList.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesSubscriptionList.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_update.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_update.py index 804286e0d7b0..f9e07b301aa4 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_update.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_samples/workspaces_update.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -44,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2022-10-01/examples/WorkspacesUpdate.json +# x-ms-original-file: specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/OperationalInsights/stable/2025-07-01/examples/WorkspacesUpdate.json if __name__ == "__main__": main() diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/conftest.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/conftest.py index 1fbe0ba3e8b2..879575556450 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/conftest.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/conftest.py @@ -18,7 +18,7 @@ load_dotenv() -# aovid record sensitive identity information in recordings +# For security, please avoid record sensitive identity information in recordings @pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): loganalyticsmanagement_subscription_id = os.environ.get( diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations.py index 7227fc83cd7e..21350216e435 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_workspace(self, resource_group): + def test_available_service_tiers_list_by_workspace(self, resource_group): response = self.client.available_service_tiers.list_by_workspace( resource_group_name=resource_group.name, 
workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations_async.py index 45b08534c726..b628d8e575ee 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_available_service_tiers_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_workspace(self, resource_group): + async def test_available_service_tiers_list_by_workspace(self, resource_group): response = await self.client.available_service_tiers.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations.py index 31742b02cb41..72a2755a6d54 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations.py @@ -20,10 +20,10 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_resource_group(self, resource_group): + def test_clusters_list_by_resource_group(self, resource_group): 
response = self.client.clusters.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -31,9 +31,9 @@ def test_list_by_resource_group(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_clusters_list(self, resource_group): response = self.client.clusters.list( - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -41,19 +41,24 @@ def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_create_or_update(self, resource_group): + def test_clusters_begin_create_or_update(self, resource_group): response = self.client.clusters.begin_create_or_update( resource_group_name=resource_group.name, cluster_name="str", parameters={ "location": "str", "associatedWorkspaces": [ - {"associateDate": "str", "resourceId": "str", "workspaceId": "str", "workspaceName": "str"} + { + "associateDate": "2020-02-20 00:00:00", + "resourceId": "str", + "workspaceId": "str", + "workspaceName": "str", + } ], "billingType": "str", - "capacityReservationProperties": {"lastSkuUpdate": "str", "minCapacity": 0}, + "capacityReservationProperties": {"lastSkuUpdate": "2020-02-20 00:00:00", "minCapacity": 0}, "clusterId": "str", - "createdDate": "str", + "createdDate": "2020-02-20 00:00:00", "id": "str", "identity": { "type": "str", @@ -64,14 +69,22 @@ def test_begin_create_or_update(self, resource_group): "isAvailabilityZonesEnabled": bool, "isDoubleEncryptionEnabled": bool, "keyVaultProperties": {"keyName": "str", "keyRsaSize": 0, "keyVaultUri": "str", "keyVersion": "str"}, - "lastModifiedDate": "str", + "lastModifiedDate": "2020-02-20 00:00:00", "name": "str", "provisioningState": "str", + "replication": 
{ + "createdDate": "2020-02-20 00:00:00", + "enabled": bool, + "isAvailabilityZonesEnabled": bool, + "lastModifiedDate": "2020-02-20 00:00:00", + "location": "str", + "provisioningState": "str", + }, "sku": {"capacity": 0, "name": "str"}, "tags": {"str": "str"}, "type": "str", }, - api_version="2022-10-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -79,11 +92,11 @@ def test_begin_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_delete(self, resource_group): + def test_clusters_begin_delete(self, resource_group): response = self.client.clusters.begin_delete( resource_group_name=resource_group.name, cluster_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -91,11 +104,11 @@ def test_begin_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_clusters_get(self, resource_group): response = self.client.clusters.get( resource_group_name=resource_group.name, cluster_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -103,7 +116,7 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_update(self, resource_group): + def test_clusters_begin_update(self, resource_group): response = self.client.clusters.begin_update( resource_group_name=resource_group.name, cluster_name="str", @@ -119,7 +132,7 @@ def test_begin_update(self, resource_group): "sku": {"capacity": 0, "name": "str"}, "tags": {"str": "str"}, }, - api_version="2022-10-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until 
service return final result # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations_async.py index 1cb078e0d3ce..3b4cd90edee1 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_clusters_operations_async.py @@ -21,10 +21,10 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_resource_group(self, resource_group): + async def test_clusters_list_by_resource_group(self, resource_group): response = self.client.clusters.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -32,9 +32,9 @@ async def test_list_by_resource_group(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list(self, resource_group): + async def test_clusters_list(self, resource_group): response = self.client.clusters.list( - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -42,7 +42,7 @@ async def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_create_or_update(self, resource_group): + async def test_clusters_begin_create_or_update(self, resource_group): response = await ( await self.client.clusters.begin_create_or_update( resource_group_name=resource_group.name, @@ -50,12 +50,17 @@ async def 
test_begin_create_or_update(self, resource_group): parameters={ "location": "str", "associatedWorkspaces": [ - {"associateDate": "str", "resourceId": "str", "workspaceId": "str", "workspaceName": "str"} + { + "associateDate": "2020-02-20 00:00:00", + "resourceId": "str", + "workspaceId": "str", + "workspaceName": "str", + } ], "billingType": "str", - "capacityReservationProperties": {"lastSkuUpdate": "str", "minCapacity": 0}, + "capacityReservationProperties": {"lastSkuUpdate": "2020-02-20 00:00:00", "minCapacity": 0}, "clusterId": "str", - "createdDate": "str", + "createdDate": "2020-02-20 00:00:00", "id": "str", "identity": { "type": "str", @@ -71,14 +76,22 @@ async def test_begin_create_or_update(self, resource_group): "keyVaultUri": "str", "keyVersion": "str", }, - "lastModifiedDate": "str", + "lastModifiedDate": "2020-02-20 00:00:00", "name": "str", "provisioningState": "str", + "replication": { + "createdDate": "2020-02-20 00:00:00", + "enabled": bool, + "isAvailabilityZonesEnabled": bool, + "lastModifiedDate": "2020-02-20 00:00:00", + "location": "str", + "provisioningState": "str", + }, "sku": {"capacity": 0, "name": "str"}, "tags": {"str": "str"}, "type": "str", }, - api_version="2022-10-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -87,12 +100,12 @@ async def test_begin_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_delete(self, resource_group): + async def test_clusters_begin_delete(self, resource_group): response = await ( await self.client.clusters.begin_delete( resource_group_name=resource_group.name, cluster_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -101,11 +114,11 @@ async def test_begin_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) 
@recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_clusters_get(self, resource_group): response = await self.client.clusters.get( resource_group_name=resource_group.name, cluster_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -113,7 +126,7 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_update(self, resource_group): + async def test_clusters_begin_update(self, resource_group): response = await ( await self.client.clusters.begin_update( resource_group_name=resource_group.name, @@ -135,7 +148,7 @@ async def test_begin_update(self, resource_group): "sku": {"capacity": 0, "name": "str"}, "tags": {"str": "str"}, }, - api_version="2022-10-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations.py index b38e545a8332..9e15ba80960f 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_workspace(self, resource_group): + def test_data_exports_list_by_workspace(self, resource_group): response = self.client.data_exports.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -32,7 +32,7 @@ 
def test_list_by_workspace(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_create_or_update(self, resource_group): + def test_data_exports_create_or_update(self, resource_group): response = self.client.data_exports.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -49,7 +49,7 @@ def test_create_or_update(self, resource_group): "tableNames": ["str"], "type": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -57,12 +57,12 @@ def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_data_exports_get(self, resource_group): response = self.client.data_exports.get( resource_group_name=resource_group.name, workspace_name="str", data_export_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -70,12 +70,12 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_delete(self, resource_group): + def test_data_exports_delete(self, resource_group): response = self.client.data_exports.delete( resource_group_name=resource_group.name, workspace_name="str", data_export_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations_async.py index c8c275998c56..1bbf7a4d9093 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations_async.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_exports_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_workspace(self, resource_group): + async def test_data_exports_list_by_workspace(self, resource_group): response = self.client.data_exports.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -33,7 +33,7 @@ async def test_list_by_workspace(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_create_or_update(self, resource_group): + async def test_data_exports_create_or_update(self, resource_group): response = await self.client.data_exports.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -50,7 +50,7 @@ async def test_create_or_update(self, resource_group): "tableNames": ["str"], "type": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -58,12 +58,12 @@ async def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_data_exports_get(self, resource_group): response = await self.client.data_exports.get( resource_group_name=resource_group.name, workspace_name="str", data_export_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -71,12 +71,12 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_delete(self, resource_group): + async def 
test_data_exports_delete(self, resource_group): response = await self.client.data_exports.delete( resource_group_name=resource_group.name, workspace_name="str", data_export_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations.py index 641ca14768be..a2170d43e828 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations.py @@ -20,7 +20,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_create_or_update(self, resource_group): + def test_data_sources_create_or_update(self, resource_group): response = self.client.data_sources.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -34,7 +34,7 @@ def test_create_or_update(self, resource_group): "tags": {"str": "str"}, "type": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -42,12 +42,12 @@ def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_delete(self, resource_group): + def test_data_sources_delete(self, resource_group): response = self.client.data_sources.delete( resource_group_name=resource_group.name, workspace_name="str", data_source_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -55,12 +55,12 @@ def test_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy 
- def test_get(self, resource_group): + def test_data_sources_get(self, resource_group): response = self.client.data_sources.get( resource_group_name=resource_group.name, workspace_name="str", data_source_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -68,12 +68,12 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_workspace(self, resource_group): + def test_data_sources_list_by_workspace(self, resource_group): response = self.client.data_sources.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", filter="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations_async.py index eaadae597f26..cfe22dd0adc9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_data_sources_operations_async.py @@ -21,7 +21,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_create_or_update(self, resource_group): + async def test_data_sources_create_or_update(self, resource_group): response = await self.client.data_sources.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -35,7 +35,7 @@ async def test_create_or_update(self, resource_group): "tags": {"str": "str"}, "type": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -43,12 
+43,12 @@ async def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_delete(self, resource_group): + async def test_data_sources_delete(self, resource_group): response = await self.client.data_sources.delete( resource_group_name=resource_group.name, workspace_name="str", data_source_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -56,12 +56,12 @@ async def test_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_data_sources_get(self, resource_group): response = await self.client.data_sources.get( resource_group_name=resource_group.name, workspace_name="str", data_source_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -69,12 +69,12 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_workspace(self, resource_group): + async def test_data_sources_list_by_workspace(self, resource_group): response = self.client.data_sources.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", filter="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations.py index e2655da89001..4248e1cc3741 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations.py @@ -20,9 +20,9 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_deleted_workspaces_list(self, resource_group): response = self.client.deleted_workspaces.list( - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -30,10 +30,10 @@ def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_resource_group(self, resource_group): + def test_deleted_workspaces_list_by_resource_group(self, resource_group): response = self.client.deleted_workspaces.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations_async.py index 40790285853a..2ef645018c2d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_deleted_workspaces_operations_async.py @@ -21,9 +21,9 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list(self, resource_group): + async def test_deleted_workspaces_list(self, resource_group): response = self.client.deleted_workspaces.list( - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r async for r in response] # 
please add some check logic here by yourself @@ -31,10 +31,10 @@ async def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_resource_group(self, resource_group): + async def test_deleted_workspaces_list_by_resource_group(self, resource_group): response = self.client.deleted_workspaces.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations.py index 206d2eb6ecab..4f17177a5450 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations.py @@ -20,12 +20,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_delete(self, resource_group): + def test_gateways_delete(self, resource_group): response = self.client.gateways.delete( resource_group_name=resource_group.name, workspace_name="str", gateway_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations_async.py index 83924d573d7c..f6547e908590 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations_async.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_gateways_operations_async.py @@ -21,12 +21,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_delete(self, resource_group): + async def test_gateways_delete(self, resource_group): response = await self.client.gateways.delete( resource_group_name=resource_group.name, workspace_name="str", gateway_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations.py index 265bd2880bbe..272d2b8a7271 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations.py @@ -20,12 +20,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_disable(self, resource_group): + def test_intelligence_packs_disable(self, resource_group): response = self.client.intelligence_packs.disable( resource_group_name=resource_group.name, workspace_name="str", intelligence_pack_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -33,12 +33,12 @@ def test_disable(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_enable(self, resource_group): + def test_intelligence_packs_enable(self, resource_group): response = self.client.intelligence_packs.enable( resource_group_name=resource_group.name, workspace_name="str", intelligence_pack_name="str", - 
api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -46,11 +46,11 @@ def test_enable(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_intelligence_packs_list(self, resource_group): response = self.client.intelligence_packs.list( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations_async.py index d39adeee23b3..943bc9f31829 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_intelligence_packs_operations_async.py @@ -21,12 +21,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_disable(self, resource_group): + async def test_intelligence_packs_disable(self, resource_group): response = await self.client.intelligence_packs.disable( resource_group_name=resource_group.name, workspace_name="str", intelligence_pack_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -34,12 +34,12 @@ async def test_disable(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_enable(self, resource_group): + async def test_intelligence_packs_enable(self, resource_group): response = await self.client.intelligence_packs.enable( resource_group_name=resource_group.name, 
workspace_name="str", intelligence_pack_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -47,11 +47,11 @@ async def test_enable(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list(self, resource_group): + async def test_intelligence_packs_list(self, resource_group): response = await self.client.intelligence_packs.list( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations.py index 485f4c2f70f2..1ecf5edf38a8 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations.py @@ -20,7 +20,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_create_or_update(self, resource_group): + def test_linked_services_begin_create_or_update(self, resource_group): response = self.client.linked_services.begin_create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -34,7 +34,7 @@ def test_begin_create_or_update(self, resource_group): "type": "str", "writeAccessResourceId": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -42,12 +42,12 @@ def test_begin_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) 
@recorded_by_proxy - def test_begin_delete(self, resource_group): + def test_linked_services_begin_delete(self, resource_group): response = self.client.linked_services.begin_delete( resource_group_name=resource_group.name, workspace_name="str", linked_service_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -55,12 +55,12 @@ def test_begin_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_linked_services_get(self, resource_group): response = self.client.linked_services.get( resource_group_name=resource_group.name, workspace_name="str", linked_service_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -68,11 +68,11 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_workspace(self, resource_group): + def test_linked_services_list_by_workspace(self, resource_group): response = self.client.linked_services.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations_async.py index bd80aac4523c..b7855d0ae41b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_services_operations_async.py @@ -21,7 
+21,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_create_or_update(self, resource_group): + async def test_linked_services_begin_create_or_update(self, resource_group): response = await ( await self.client.linked_services.begin_create_or_update( resource_group_name=resource_group.name, @@ -36,7 +36,7 @@ async def test_begin_create_or_update(self, resource_group): "type": "str", "writeAccessResourceId": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -45,13 +45,13 @@ async def test_begin_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_delete(self, resource_group): + async def test_linked_services_begin_delete(self, resource_group): response = await ( await self.client.linked_services.begin_delete( resource_group_name=resource_group.name, workspace_name="str", linked_service_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -60,12 +60,12 @@ async def test_begin_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_linked_services_get(self, resource_group): response = await self.client.linked_services.get( resource_group_name=resource_group.name, workspace_name="str", linked_service_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -73,11 +73,11 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_workspace(self, resource_group): + async def test_linked_services_list_by_workspace(self, resource_group): 
response = self.client.linked_services.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations.py index 07442c313288..0b192d8ee2d3 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations.py @@ -20,7 +20,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_create_or_update(self, resource_group): + def test_linked_storage_accounts_create_or_update(self, resource_group): response = self.client.linked_storage_accounts.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -32,7 +32,7 @@ def test_create_or_update(self, resource_group): "storageAccountIds": ["str"], "type": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -40,12 +40,12 @@ def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_delete(self, resource_group): + def test_linked_storage_accounts_delete(self, resource_group): response = self.client.linked_storage_accounts.delete( resource_group_name=resource_group.name, workspace_name="str", data_source_type="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -53,12 +53,12 @@ def test_delete(self, resource_group): 
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_linked_storage_accounts_get(self, resource_group): response = self.client.linked_storage_accounts.get( resource_group_name=resource_group.name, workspace_name="str", data_source_type="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -66,11 +66,11 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_workspace(self, resource_group): + def test_linked_storage_accounts_list_by_workspace(self, resource_group): response = self.client.linked_storage_accounts.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations_async.py index 2f312b4bed51..68d9cc2e239c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_linked_storage_accounts_operations_async.py @@ -21,7 +21,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_create_or_update(self, resource_group): + async def test_linked_storage_accounts_create_or_update(self, resource_group): response = await self.client.linked_storage_accounts.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -33,7 +33,7 @@ async def test_create_or_update(self, 
resource_group): "storageAccountIds": ["str"], "type": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -41,12 +41,12 @@ async def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_delete(self, resource_group): + async def test_linked_storage_accounts_delete(self, resource_group): response = await self.client.linked_storage_accounts.delete( resource_group_name=resource_group.name, workspace_name="str", data_source_type="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -54,12 +54,12 @@ async def test_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_linked_storage_accounts_get(self, resource_group): response = await self.client.linked_storage_accounts.get( resource_group_name=resource_group.name, workspace_name="str", data_source_type="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -67,11 +67,11 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_workspace(self, resource_group): + async def test_linked_storage_accounts_list_by_workspace(self, resource_group): response = self.client.linked_storage_accounts.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations.py index ab224612c265..8a631b06d422 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_management_groups_list(self, resource_group): response = self.client.management_groups.list( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations_async.py index dfbcf1d4b08a..01bafeaa3c5e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_management_groups_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list(self, resource_group): + async def test_management_groups_list(self, resource_group): response = self.client.management_groups.list( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git 
a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations.py index dcd1005b384e..b39a45327aeb 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_operation_statuses_get(self, resource_group): response = self.client.operation_statuses.get( location="str", async_operation_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations_async.py index b8c5b5191f30..9f1dd3ea8cf4 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operation_statuses_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_operation_statuses_get(self, resource_group): response = await self.client.operation_statuses.get( location="str", async_operation_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git 
a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations.py index a71bbdeaec2f..89651f5e2a8a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations.py @@ -20,9 +20,9 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_operations_list(self, resource_group): response = self.client.operations.list( - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations_async.py index 566b357e7439..3e392713c9a3 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_operations_async.py @@ -21,9 +21,9 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list(self, resource_group): + async def test_operations_list(self, resource_group): response = self.client.operations.list( - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations.py 
index 369cab55436b..b4d6f5c99db0 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_queries_list(self, resource_group): response = self.client.queries.list( resource_group_name=resource_group.name, query_pack_name="str", - api_version="2019-09-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -32,7 +32,7 @@ def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_search(self, resource_group): + def test_queries_search(self, resource_group): response = self.client.queries.search( resource_group_name=resource_group.name, query_pack_name="str", @@ -40,7 +40,7 @@ def test_search(self, resource_group): "related": {"categories": ["str"], "resourceTypes": ["str"], "solutions": ["str"]}, "tags": {"str": ["str"]}, }, - api_version="2019-09-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -48,12 +48,12 @@ def test_search(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_queries_get(self, resource_group): response = self.client.queries.get( resource_group_name=resource_group.name, query_pack_name="str", id="str", - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -61,7 +61,7 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_put(self, resource_group): + def test_queries_put(self, 
resource_group): response = self.client.queries.put( resource_group_name=resource_group.name, query_pack_name="str", @@ -88,7 +88,7 @@ def test_put(self, resource_group): "timeModified": "2020-02-20 00:00:00", "type": "str", }, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -96,7 +96,7 @@ def test_put(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_update(self, resource_group): + def test_queries_update(self, resource_group): response = self.client.queries.update( resource_group_name=resource_group.name, query_pack_name="str", @@ -123,7 +123,7 @@ def test_update(self, resource_group): "timeModified": "2020-02-20 00:00:00", "type": "str", }, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -131,12 +131,12 @@ def test_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_delete(self, resource_group): + def test_queries_delete(self, resource_group): response = self.client.queries.delete( resource_group_name=resource_group.name, query_pack_name="str", id="str", - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations_async.py index d466472e6c15..2ff65937fac4 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_queries_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def 
test_list(self, resource_group): + async def test_queries_list(self, resource_group): response = self.client.queries.list( resource_group_name=resource_group.name, query_pack_name="str", - api_version="2019-09-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -33,7 +33,7 @@ async def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_search(self, resource_group): + async def test_queries_search(self, resource_group): response = self.client.queries.search( resource_group_name=resource_group.name, query_pack_name="str", @@ -41,7 +41,7 @@ async def test_search(self, resource_group): "related": {"categories": ["str"], "resourceTypes": ["str"], "solutions": ["str"]}, "tags": {"str": ["str"]}, }, - api_version="2019-09-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -49,12 +49,12 @@ async def test_search(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_queries_get(self, resource_group): response = await self.client.queries.get( resource_group_name=resource_group.name, query_pack_name="str", id="str", - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -62,7 +62,7 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_put(self, resource_group): + async def test_queries_put(self, resource_group): response = await self.client.queries.put( resource_group_name=resource_group.name, query_pack_name="str", @@ -89,7 +89,7 @@ async def test_put(self, resource_group): "timeModified": "2020-02-20 00:00:00", "type": "str", }, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some 
check logic here by yourself @@ -97,7 +97,7 @@ async def test_put(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_update(self, resource_group): + async def test_queries_update(self, resource_group): response = await self.client.queries.update( resource_group_name=resource_group.name, query_pack_name="str", @@ -124,7 +124,7 @@ async def test_update(self, resource_group): "timeModified": "2020-02-20 00:00:00", "type": "str", }, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -132,12 +132,12 @@ async def test_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_delete(self, resource_group): + async def test_queries_delete(self, resource_group): response = await self.client.queries.delete( resource_group_name=resource_group.name, query_pack_name="str", id="str", - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations.py index e201d3e79fdc..e9b7fadd7bc8 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations.py @@ -20,9 +20,9 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_query_packs_list(self, resource_group): response = self.client.query_packs.list( - api_version="2019-09-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself 
@@ -30,10 +30,10 @@ def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_resource_group(self, resource_group): + def test_query_packs_list_by_resource_group(self, resource_group): response = self.client.query_packs.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2019-09-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -41,7 +41,7 @@ def test_list_by_resource_group(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_create_or_update_without_name(self, resource_group): + def test_query_packs_create_or_update_without_name(self, resource_group): response = self.client.query_packs.create_or_update_without_name( resource_group_name=resource_group.name, log_analytics_query_pack_payload={ @@ -50,12 +50,20 @@ def test_create_or_update_without_name(self, resource_group): "name": "str", "provisioningState": "str", "queryPackId": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "tags": {"str": "str"}, "timeCreated": "2020-02-20 00:00:00", "timeModified": "2020-02-20 00:00:00", "type": "str", }, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -63,11 +71,11 @@ def test_create_or_update_without_name(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_delete(self, resource_group): + def test_query_packs_delete(self, resource_group): response = self.client.query_packs.delete( resource_group_name=resource_group.name, query_pack_name="str", - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -75,11 +83,11 @@ def 
test_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_query_packs_get(self, resource_group): response = self.client.query_packs.get( resource_group_name=resource_group.name, query_pack_name="str", - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -87,7 +95,7 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_create_or_update(self, resource_group): + def test_query_packs_create_or_update(self, resource_group): response = self.client.query_packs.create_or_update( resource_group_name=resource_group.name, query_pack_name="str", @@ -97,12 +105,20 @@ def test_create_or_update(self, resource_group): "name": "str", "provisioningState": "str", "queryPackId": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "tags": {"str": "str"}, "timeCreated": "2020-02-20 00:00:00", "timeModified": "2020-02-20 00:00:00", "type": "str", }, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -110,12 +126,12 @@ def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_update_tags(self, resource_group): + def test_query_packs_update_tags(self, resource_group): response = self.client.query_packs.update_tags( resource_group_name=resource_group.name, query_pack_name="str", query_pack_tags={"tags": {"str": "str"}}, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations_async.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations_async.py index e40f2126a2f0..91079533049c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_query_packs_operations_async.py @@ -21,9 +21,9 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list(self, resource_group): + async def test_query_packs_list(self, resource_group): response = self.client.query_packs.list( - api_version="2019-09-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -31,10 +31,10 @@ async def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_resource_group(self, resource_group): + async def test_query_packs_list_by_resource_group(self, resource_group): response = self.client.query_packs.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2019-09-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -42,7 +42,7 @@ async def test_list_by_resource_group(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_create_or_update_without_name(self, resource_group): + async def test_query_packs_create_or_update_without_name(self, resource_group): response = await self.client.query_packs.create_or_update_without_name( resource_group_name=resource_group.name, log_analytics_query_pack_payload={ @@ -51,12 +51,20 @@ async def test_create_or_update_without_name(self, resource_group): "name": "str", "provisioningState": "str", "queryPackId": "str", + "systemData": { + 
"createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "tags": {"str": "str"}, "timeCreated": "2020-02-20 00:00:00", "timeModified": "2020-02-20 00:00:00", "type": "str", }, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -64,11 +72,11 @@ async def test_create_or_update_without_name(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_delete(self, resource_group): + async def test_query_packs_delete(self, resource_group): response = await self.client.query_packs.delete( resource_group_name=resource_group.name, query_pack_name="str", - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -76,11 +84,11 @@ async def test_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_query_packs_get(self, resource_group): response = await self.client.query_packs.get( resource_group_name=resource_group.name, query_pack_name="str", - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -88,7 +96,7 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_create_or_update(self, resource_group): + async def test_query_packs_create_or_update(self, resource_group): response = await self.client.query_packs.create_or_update( resource_group_name=resource_group.name, query_pack_name="str", @@ -98,12 +106,20 @@ async def test_create_or_update(self, resource_group): "name": "str", "provisioningState": "str", "queryPackId": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + 
"createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, "tags": {"str": "str"}, "timeCreated": "2020-02-20 00:00:00", "timeModified": "2020-02-20 00:00:00", "type": "str", }, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -111,12 +127,12 @@ async def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_update_tags(self, resource_group): + async def test_query_packs_update_tags(self, resource_group): response = await self.client.query_packs.update_tags( resource_group_name=resource_group.name, query_pack_name="str", query_pack_tags={"tags": {"str": "str"}}, - api_version="2019-09-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations.py index 732c7b0741e9..402dd7dedcc9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations.py @@ -20,12 +20,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_delete(self, resource_group): + def test_saved_searches_delete(self, resource_group): response = self.client.saved_searches.delete( resource_group_name=resource_group.name, workspace_name="str", saved_search_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -33,7 +33,7 @@ def test_delete(self, resource_group): 
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_create_or_update(self, resource_group): + def test_saved_searches_create_or_update(self, resource_group): response = self.client.saved_searches.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -51,7 +51,7 @@ def test_create_or_update(self, resource_group): "type": "str", "version": 0, }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -59,12 +59,12 @@ def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_saved_searches_get(self, resource_group): response = self.client.saved_searches.get( resource_group_name=resource_group.name, workspace_name="str", saved_search_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -72,11 +72,11 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_workspace(self, resource_group): + def test_saved_searches_list_by_workspace(self, resource_group): response = self.client.saved_searches.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations_async.py index 7afeabdffb9f..3df10a17dd6d 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations_async.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_saved_searches_operations_async.py @@ -21,12 +21,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_delete(self, resource_group): + async def test_saved_searches_delete(self, resource_group): response = await self.client.saved_searches.delete( resource_group_name=resource_group.name, workspace_name="str", saved_search_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -34,7 +34,7 @@ async def test_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_create_or_update(self, resource_group): + async def test_saved_searches_create_or_update(self, resource_group): response = await self.client.saved_searches.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -52,7 +52,7 @@ async def test_create_or_update(self, resource_group): "type": "str", "version": 0, }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -60,12 +60,12 @@ async def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_saved_searches_get(self, resource_group): response = await self.client.saved_searches.get( resource_group_name=resource_group.name, workspace_name="str", saved_search_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -73,11 +73,11 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_workspace(self, resource_group): + async def test_saved_searches_list_by_workspace(self, resource_group): 
response = await self.client.saved_searches.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations.py index 4af4e214066e..163ab9356293 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_schema_get(self, resource_group): response = self.client.schema.get( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations_async.py index a968c33a96d3..7c6d6f2bca20 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_schema_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_schema_get(self, resource_group): response = await self.client.schema.get( resource_group_name=resource_group.name, 
workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations.py index c3f672c8316e..1137c49957a9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get_shared_keys(self, resource_group): + def test_shared_keys_get_shared_keys(self, resource_group): response = self.client.shared_keys.get_shared_keys( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -32,11 +32,11 @@ def test_get_shared_keys(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_regenerate(self, resource_group): + def test_shared_keys_regenerate(self, resource_group): response = self.client.shared_keys.regenerate( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations_async.py index f38805abe402..2f6cb976c1f1 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations_async.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_shared_keys_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get_shared_keys(self, resource_group): + async def test_shared_keys_get_shared_keys(self, resource_group): response = await self.client.shared_keys.get_shared_keys( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -33,11 +33,11 @@ async def test_get_shared_keys(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_regenerate(self, resource_group): + async def test_shared_keys_regenerate(self, resource_group): response = await self.client.shared_keys.regenerate( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations.py index f36d19061e8c..c0712edabba5 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations.py @@ -20,7 +20,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_create_or_update(self, resource_group): + def test_storage_insight_configs_create_or_update(self, resource_group): response = self.client.storage_insight_configs.create_or_update( 
resource_group_name=resource_group.name, workspace_name="str", @@ -36,7 +36,7 @@ def test_create_or_update(self, resource_group): "tags": {"str": "str"}, "type": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -44,12 +44,12 @@ def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_storage_insight_configs_get(self, resource_group): response = self.client.storage_insight_configs.get( resource_group_name=resource_group.name, workspace_name="str", storage_insight_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -57,12 +57,12 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_delete(self, resource_group): + def test_storage_insight_configs_delete(self, resource_group): response = self.client.storage_insight_configs.delete( resource_group_name=resource_group.name, workspace_name="str", storage_insight_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -70,11 +70,11 @@ def test_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_workspace(self, resource_group): + def test_storage_insight_configs_list_by_workspace(self, resource_group): response = self.client.storage_insight_configs.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations_async.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations_async.py index 7a6dffb0491e..ed0e359b102f 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_storage_insight_configs_operations_async.py @@ -21,7 +21,7 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_create_or_update(self, resource_group): + async def test_storage_insight_configs_create_or_update(self, resource_group): response = await self.client.storage_insight_configs.create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -37,7 +37,7 @@ async def test_create_or_update(self, resource_group): "tags": {"str": "str"}, "type": "str", }, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -45,12 +45,12 @@ async def test_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_storage_insight_configs_get(self, resource_group): response = await self.client.storage_insight_configs.get( resource_group_name=resource_group.name, workspace_name="str", storage_insight_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -58,12 +58,12 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_delete(self, resource_group): + async def test_storage_insight_configs_delete(self, resource_group): response = await self.client.storage_insight_configs.delete( resource_group_name=resource_group.name, workspace_name="str", 
storage_insight_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -71,11 +71,11 @@ async def test_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_workspace(self, resource_group): + async def test_storage_insight_configs_list_by_workspace(self, resource_group): response = self.client.storage_insight_configs.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_summary_logs_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_summary_logs_operations.py new file mode 100644 index 000000000000..4168b6431707 --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_summary_logs_operations.py @@ -0,0 +1,137 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementSummaryLogsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_summary_logs_list_by_workspace(self, resource_group): + response = self.client.summary_logs.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2025-07-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_summary_logs_begin_create_or_update(self, resource_group): + response = self.client.summary_logs.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + parameters={ + "description": "str", + "displayName": "str", + "id": "str", + "isActive": bool, + "name": "str", + "provisioningState": "str", + "ruleDefinition": { + "binDelay": 0, + "binSize": 0, + "binStartTime": "2020-02-20 00:00:00", + "destinationTable": "str", + "query": "str", + "timeSelector": "str", + }, + "ruleType": "str", + "statusCode": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2025-07-01", + ).result() # call '.result()' to poll until service return final result + + # please add some 
check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_summary_logs_get(self, resource_group): + response = self.client.summary_logs.get( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + api_version="2025-07-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_summary_logs_begin_delete(self, resource_group): + response = self.client.summary_logs.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + api_version="2025-07-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_summary_logs_begin_start(self, resource_group): + response = self.client.summary_logs.begin_start( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + api_version="2025-07-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_summary_logs_stop(self, resource_group): + response = self.client.summary_logs.stop( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + api_version="2025-07-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_summary_logs_begin_retry_bin(self, resource_group): + response = self.client.summary_logs.begin_retry_bin( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + parameters={"properties": {"retryBinStartTime": "2020-02-20 00:00:00"}}, + api_version="2025-07-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_summary_logs_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_summary_logs_operations_async.py new file mode 100644 index 000000000000..cd599111f76c --- /dev/null +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_summary_logs_operations_async.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLogAnalyticsManagementSummaryLogsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(LogAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_summary_logs_list_by_workspace(self, resource_group): + response = self.client.summary_logs.list_by_workspace( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2025-07-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_summary_logs_begin_create_or_update(self, resource_group): + response = await ( + await self.client.summary_logs.begin_create_or_update( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + parameters={ + "description": "str", + "displayName": "str", + "id": "str", + "isActive": bool, + "name": "str", + "provisioningState": "str", + "ruleDefinition": { + "binDelay": 0, + "binSize": 0, + "binStartTime": "2020-02-20 00:00:00", + "destinationTable": "str", + "query": "str", + "timeSelector": "str", + }, + "ruleType": "str", + "statusCode": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2025-07-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_summary_logs_get(self, resource_group): + response = await self.client.summary_logs.get( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + api_version="2025-07-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_summary_logs_begin_delete(self, resource_group): + response = await ( + await self.client.summary_logs.begin_delete( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + api_version="2025-07-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_summary_logs_begin_start(self, resource_group): + response = await ( + await self.client.summary_logs.begin_start( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + api_version="2025-07-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_summary_logs_stop(self, resource_group): + response = await self.client.summary_logs.stop( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + api_version="2025-07-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_summary_logs_begin_retry_bin(self, resource_group): + response = await ( + await self.client.summary_logs.begin_retry_bin( + resource_group_name=resource_group.name, + workspace_name="str", + summary_logs_name="str", + parameters={"properties": {"retryBinStartTime": "2020-02-20 00:00:00"}}, + api_version="2025-07-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations.py index 157b5ce8fb0a..dc9a21faf321 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_workspace(self, resource_group): + def test_tables_list_by_workspace(self, resource_group): response = self.client.tables.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -32,7 +32,7 @@ def test_list_by_workspace(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_create_or_update(self, resource_group): + def test_tables_begin_create_or_update(self, resource_group): response = self.client.tables.begin_create_or_update( resource_group_name=resource_group.name, workspace_name="str", @@ -107,7 +107,7 @@ def test_begin_create_or_update(self, resource_group): "totalRetentionInDaysAsDefault": bool, "type": "str", }, - api_version="2022-10-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -115,7 +115,7 @@ def test_begin_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_update(self, resource_group): + def test_tables_begin_update(self, resource_group): response = self.client.tables.begin_update( 
resource_group_name=resource_group.name, workspace_name="str", @@ -190,7 +190,7 @@ def test_begin_update(self, resource_group): "totalRetentionInDaysAsDefault": bool, "type": "str", }, - api_version="2022-10-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -198,12 +198,12 @@ def test_begin_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_tables_get(self, resource_group): response = self.client.tables.get( resource_group_name=resource_group.name, workspace_name="str", table_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -211,12 +211,12 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_delete(self, resource_group): + def test_tables_begin_delete(self, resource_group): response = self.client.tables.begin_delete( resource_group_name=resource_group.name, workspace_name="str", table_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -224,12 +224,12 @@ def test_begin_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_migrate(self, resource_group): + def test_tables_migrate(self, resource_group): response = self.client.tables.migrate( resource_group_name=resource_group.name, workspace_name="str", table_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -237,12 +237,12 @@ def test_migrate(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_cancel_search(self, resource_group): + def 
test_tables_cancel_search(self, resource_group): response = self.client.tables.cancel_search( resource_group_name=resource_group.name, workspace_name="str", table_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations_async.py index 11d406880927..d7e39ca721c2 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_tables_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list_by_workspace(self, resource_group): + async def test_tables_list_by_workspace(self, resource_group): response = self.client.tables.list_by_workspace( resource_group_name=resource_group.name, workspace_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -33,7 +33,7 @@ async def test_list_by_workspace(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_create_or_update(self, resource_group): + async def test_tables_begin_create_or_update(self, resource_group): response = await ( await self.client.tables.begin_create_or_update( resource_group_name=resource_group.name, @@ -109,7 +109,7 @@ async def test_begin_create_or_update(self, resource_group): "totalRetentionInDaysAsDefault": bool, "type": "str", }, - api_version="2022-10-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -118,7 +118,7 
@@ async def test_begin_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_update(self, resource_group): + async def test_tables_begin_update(self, resource_group): response = await ( await self.client.tables.begin_update( resource_group_name=resource_group.name, @@ -194,7 +194,7 @@ async def test_begin_update(self, resource_group): "totalRetentionInDaysAsDefault": bool, "type": "str", }, - api_version="2022-10-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -203,12 +203,12 @@ async def test_begin_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_tables_get(self, resource_group): response = await self.client.tables.get( resource_group_name=resource_group.name, workspace_name="str", table_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -216,13 +216,13 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_delete(self, resource_group): + async def test_tables_begin_delete(self, resource_group): response = await ( await self.client.tables.begin_delete( resource_group_name=resource_group.name, workspace_name="str", table_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -231,12 +231,12 @@ async def test_begin_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_migrate(self, resource_group): + async def test_tables_migrate(self, resource_group): response = await self.client.tables.migrate( resource_group_name=resource_group.name, workspace_name="str", 
table_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -244,12 +244,12 @@ async def test_migrate(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_cancel_search(self, resource_group): + async def test_tables_cancel_search(self, resource_group): response = await self.client.tables.cancel_search( resource_group_name=resource_group.name, workspace_name="str", table_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations.py index 64bc44c3d434..1106d4e81e2e 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations.py @@ -20,11 +20,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_usages_list(self, resource_group): response = self.client.usages.list( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations_async.py index c1a19dcf2b39..9a8f990d0d17 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations_async.py +++ 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_usages_operations_async.py @@ -21,11 +21,11 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list(self, resource_group): + async def test_usages_list(self, resource_group): response = self.client.usages.list( resource_group_name=resource_group.name, workspace_name="str", - api_version="2020-08-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations.py index ef87dcf94da4..ff1de6f40d2a 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations.py @@ -20,12 +20,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_purge(self, resource_group): + def test_workspace_purge_purge(self, resource_group): response = self.client.workspace_purge.purge( resource_group_name=resource_group.name, workspace_name="str", body={"filters": [{"column": "str", "key": "str", "operator": "str", "value": {}}], "table": "str"}, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -33,12 +33,12 @@ def test_purge(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get_purge_status(self, resource_group): + def test_workspace_purge_get_purge_status(self, resource_group): response = self.client.workspace_purge.get_purge_status( 
resource_group_name=resource_group.name, workspace_name="str", purge_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations_async.py index ce90884ab927..1006de7cd7f9 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspace_purge_operations_async.py @@ -21,12 +21,12 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_purge(self, resource_group): + async def test_workspace_purge_purge(self, resource_group): response = await self.client.workspace_purge.purge( resource_group_name=resource_group.name, workspace_name="str", body={"filters": [{"column": "str", "key": "str", "operator": "str", "value": {}}], "table": "str"}, - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -34,12 +34,12 @@ async def test_purge(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get_purge_status(self, resource_group): + async def test_workspace_purge_get_purge_status(self, resource_group): response = await self.client.workspace_purge.get_purge_status( resource_group_name=resource_group.name, workspace_name="str", purge_id="str", - api_version="2020-08-01", + api_version="2025-07-01", ) # please add some check logic here by yourself diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations.py 
b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations.py index 66fc8f15243d..70657b567a6c 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations.py @@ -20,9 +20,9 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list(self, resource_group): + def test_workspaces_list(self, resource_group): response = self.client.workspaces.list( - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -30,10 +30,10 @@ def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_list_by_resource_group(self, resource_group): + def test_workspaces_list_by_resource_group(self, resource_group): response = self.client.workspaces.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r for r in response] # please add some check logic here by yourself @@ -41,22 +41,25 @@ def test_list_by_resource_group(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_create_or_update(self, resource_group): + def test_workspaces_begin_create_or_update(self, resource_group): response = self.client.workspaces.begin_create_or_update( resource_group_name=resource_group.name, workspace_name="str", parameters={ "location": "str", - "createdDate": "str", + "createdDate": "2020-02-20 00:00:00", "customerId": "str", "defaultDataCollectionRuleResourceId": "str", "etag": "str", + "failover": {"lastModifiedDate": "2020-02-20 00:00:00", "state": "str"}, "features": { + "associations": ["str"], "clusterResourceId": "str", 
"disableLocalAuth": bool, "enableDataExport": bool, "enableLogAccessUsingOnlyResourcePermissions": bool, "immediatePurgeDataOn30Days": bool, + "unifiedSentinelBillingOnly": bool, }, "forceCmkForQuery": bool, "id": "str", @@ -66,14 +69,21 @@ def test_begin_create_or_update(self, resource_group): "tenantId": "str", "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, }, - "modifiedDate": "str", + "modifiedDate": "2020-02-20 00:00:00", "name": "str", "privateLinkScopedResources": [{"resourceId": "str", "scopeId": "str"}], "provisioningState": "str", "publicNetworkAccessForIngestion": "Enabled", "publicNetworkAccessForQuery": "Enabled", + "replication": { + "createdDate": "2020-02-20 00:00:00", + "enabled": bool, + "lastModifiedDate": "2020-02-20 00:00:00", + "location": "str", + "provisioningState": "str", + }, "retentionInDays": 0, - "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "str"}, + "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "2020-02-20 00:00:00"}, "systemData": { "createdAt": "2020-02-20 00:00:00", "createdBy": "str", @@ -86,7 +96,7 @@ def test_begin_create_or_update(self, resource_group): "type": "str", "workspaceCapping": {"dailyQuotaGb": 0.0, "dataIngestionStatus": "str", "quotaNextResetTime": "str"}, }, - api_version="2022-10-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some check logic here by yourself @@ -94,11 +104,11 @@ def test_begin_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_begin_delete(self, resource_group): + def test_workspaces_begin_delete(self, resource_group): response = self.client.workspaces.begin_delete( resource_group_name=resource_group.name, workspace_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ).result() # call '.result()' to poll until service return final result # please add some 
check logic here by yourself @@ -106,11 +116,11 @@ def test_begin_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_get(self, resource_group): + def test_workspaces_get(self, resource_group): response = self.client.workspaces.get( resource_group_name=resource_group.name, workspace_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -118,21 +128,24 @@ def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy - def test_update(self, resource_group): + def test_workspaces_update(self, resource_group): response = self.client.workspaces.update( resource_group_name=resource_group.name, workspace_name="str", parameters={ - "createdDate": "str", + "createdDate": "2020-02-20 00:00:00", "customerId": "str", "defaultDataCollectionRuleResourceId": "str", "etag": "str", + "failover": {"lastModifiedDate": "2020-02-20 00:00:00", "state": "str"}, "features": { + "associations": ["str"], "clusterResourceId": "str", "disableLocalAuth": bool, "enableDataExport": bool, "enableLogAccessUsingOnlyResourcePermissions": bool, "immediatePurgeDataOn30Days": bool, + "unifiedSentinelBillingOnly": bool, }, "forceCmkForQuery": bool, "id": "str", @@ -142,20 +155,90 @@ def test_update(self, resource_group): "tenantId": "str", "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, }, - "modifiedDate": "str", + "modifiedDate": "2020-02-20 00:00:00", "name": "str", "privateLinkScopedResources": [{"resourceId": "str", "scopeId": "str"}], "provisioningState": "str", "publicNetworkAccessForIngestion": "Enabled", "publicNetworkAccessForQuery": "Enabled", + "replication": { + "createdDate": "2020-02-20 00:00:00", + "enabled": bool, + "lastModifiedDate": "2020-02-20 00:00:00", + "location": "str", + "provisioningState": "str", + }, "retentionInDays": 0, - "sku": {"name": "str", 
"capacityReservationLevel": 0, "lastSkuUpdate": "str"}, + "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "2020-02-20 00:00:00"}, "tags": {"str": "str"}, "type": "str", "workspaceCapping": {"dailyQuotaGb": 0.0, "dataIngestionStatus": "str", "quotaNextResetTime": "str"}, }, - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_failover(self, resource_group): + response = self.client.workspaces.begin_failover( + resource_group_name=resource_group.name, + location="str", + workspace_name="str", + api_version="2025-07-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_failback(self, resource_group): + response = self.client.workspaces.begin_failback( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2025-07-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_list_nsp(self, resource_group): + response = self.client.workspaces.list_nsp( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2025-07-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_get_nsp(self, resource_group): + response = self.client.workspaces.get_nsp( + resource_group_name=resource_group.name, + workspace_name="str", + network_security_perimeter_configuration_name="str", + api_version="2025-07-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_workspaces_begin_reconcile_nsp(self, resource_group): + response = self.client.workspaces.begin_reconcile_nsp( + resource_group_name=resource_group.name, + workspace_name="str", + network_security_perimeter_configuration_name="str", + api_version="2025-07-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations_async.py b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations_async.py index bbb4da741d11..0b923cc7e082 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations_async.py +++ b/sdk/loganalytics/azure-mgmt-loganalytics/generated_tests/test_log_analytics_management_workspaces_operations_async.py @@ -21,9 +21,9 @@ def setup_method(self, method): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_list(self, resource_group): + async def test_workspaces_list(self, resource_group): response = self.client.workspaces.list( - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -31,10 +31,10 @@ async def test_list(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def 
test_list_by_resource_group(self, resource_group): + async def test_workspaces_list_by_resource_group(self, resource_group): response = self.client.workspaces.list_by_resource_group( resource_group_name=resource_group.name, - api_version="2022-10-01", + api_version="2025-07-01", ) result = [r async for r in response] # please add some check logic here by yourself @@ -42,23 +42,26 @@ async def test_list_by_resource_group(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_create_or_update(self, resource_group): + async def test_workspaces_begin_create_or_update(self, resource_group): response = await ( await self.client.workspaces.begin_create_or_update( resource_group_name=resource_group.name, workspace_name="str", parameters={ "location": "str", - "createdDate": "str", + "createdDate": "2020-02-20 00:00:00", "customerId": "str", "defaultDataCollectionRuleResourceId": "str", "etag": "str", + "failover": {"lastModifiedDate": "2020-02-20 00:00:00", "state": "str"}, "features": { + "associations": ["str"], "clusterResourceId": "str", "disableLocalAuth": bool, "enableDataExport": bool, "enableLogAccessUsingOnlyResourcePermissions": bool, "immediatePurgeDataOn30Days": bool, + "unifiedSentinelBillingOnly": bool, }, "forceCmkForQuery": bool, "id": "str", @@ -68,14 +71,21 @@ async def test_begin_create_or_update(self, resource_group): "tenantId": "str", "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, }, - "modifiedDate": "str", + "modifiedDate": "2020-02-20 00:00:00", "name": "str", "privateLinkScopedResources": [{"resourceId": "str", "scopeId": "str"}], "provisioningState": "str", "publicNetworkAccessForIngestion": "Enabled", "publicNetworkAccessForQuery": "Enabled", + "replication": { + "createdDate": "2020-02-20 00:00:00", + "enabled": bool, + "lastModifiedDate": "2020-02-20 00:00:00", + "location": "str", + "provisioningState": "str", + }, "retentionInDays": 0, - 
"sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "str"}, + "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "2020-02-20 00:00:00"}, "systemData": { "createdAt": "2020-02-20 00:00:00", "createdBy": "str", @@ -92,7 +102,7 @@ async def test_begin_create_or_update(self, resource_group): "quotaNextResetTime": "str", }, }, - api_version="2022-10-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -101,12 +111,12 @@ async def test_begin_create_or_update(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_begin_delete(self, resource_group): + async def test_workspaces_begin_delete(self, resource_group): response = await ( await self.client.workspaces.begin_delete( resource_group_name=resource_group.name, workspace_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) ).result() # call '.result()' to poll until service return final result @@ -115,11 +125,11 @@ async def test_begin_delete(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_get(self, resource_group): + async def test_workspaces_get(self, resource_group): response = await self.client.workspaces.get( resource_group_name=resource_group.name, workspace_name="str", - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself @@ -127,21 +137,24 @@ async def test_get(self, resource_group): @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async - async def test_update(self, resource_group): + async def test_workspaces_update(self, resource_group): response = await self.client.workspaces.update( resource_group_name=resource_group.name, workspace_name="str", parameters={ - "createdDate": "str", + "createdDate": "2020-02-20 00:00:00", "customerId": "str", "defaultDataCollectionRuleResourceId": 
"str", "etag": "str", + "failover": {"lastModifiedDate": "2020-02-20 00:00:00", "state": "str"}, "features": { + "associations": ["str"], "clusterResourceId": "str", "disableLocalAuth": bool, "enableDataExport": bool, "enableLogAccessUsingOnlyResourcePermissions": bool, "immediatePurgeDataOn30Days": bool, + "unifiedSentinelBillingOnly": bool, }, "forceCmkForQuery": bool, "id": "str", @@ -151,20 +164,96 @@ async def test_update(self, resource_group): "tenantId": "str", "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, }, - "modifiedDate": "str", + "modifiedDate": "2020-02-20 00:00:00", "name": "str", "privateLinkScopedResources": [{"resourceId": "str", "scopeId": "str"}], "provisioningState": "str", "publicNetworkAccessForIngestion": "Enabled", "publicNetworkAccessForQuery": "Enabled", + "replication": { + "createdDate": "2020-02-20 00:00:00", + "enabled": bool, + "lastModifiedDate": "2020-02-20 00:00:00", + "location": "str", + "provisioningState": "str", + }, "retentionInDays": 0, - "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "str"}, + "sku": {"name": "str", "capacityReservationLevel": 0, "lastSkuUpdate": "2020-02-20 00:00:00"}, "tags": {"str": "str"}, "type": "str", "workspaceCapping": {"dailyQuotaGb": 0.0, "dataIngestionStatus": "str", "quotaNextResetTime": "str"}, }, - api_version="2022-10-01", + api_version="2025-07-01", ) # please add some check logic here by yourself # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_failover(self, resource_group): + response = await ( + await self.client.workspaces.begin_failover( + resource_group_name=resource_group.name, + location="str", + workspace_name="str", + api_version="2025-07-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_failback(self, resource_group): + response = await ( + await self.client.workspaces.begin_failback( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2025-07-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_list_nsp(self, resource_group): + response = self.client.workspaces.list_nsp( + resource_group_name=resource_group.name, + workspace_name="str", + api_version="2025-07-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_get_nsp(self, resource_group): + response = await self.client.workspaces.get_nsp( + resource_group_name=resource_group.name, + workspace_name="str", + network_security_perimeter_configuration_name="str", + api_version="2025-07-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_workspaces_begin_reconcile_nsp(self, resource_group): + response = await ( + await self.client.workspaces.begin_reconcile_nsp( + resource_group_name=resource_group.name, + workspace_name="str", + network_security_perimeter_configuration_name="str", + api_version="2025-07-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/pyproject.toml b/sdk/loganalytics/azure-mgmt-loganalytics/pyproject.toml index 540da07d41af..2ce557b5636b 100644 --- a/sdk/loganalytics/azure-mgmt-loganalytics/pyproject.toml +++ b/sdk/loganalytics/azure-mgmt-loganalytics/pyproject.toml @@ -1,6 +1,88 @@ +[build-system] +requires = [ + "setuptools>=77.0.3", + "wheel", +] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-mgmt-loganalytics" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Azure Loganalytics Management Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = [ + "azure", + "azure sdk", +] +dependencies = [ + "msrest>=0.7.1", + "azure-mgmt-core>=1.6.0", + "typing-extensions>=4.6.0", +] +dynamic = [ + "version", + "readme", +] + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic.version] +attr = "azure.mgmt.loganalytics._version.VERSION" + +[tool.setuptools.dynamic.readme] +file = [ + "README.md", + "CHANGELOG.md", +] +content-type = "text/markdown" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + "azure.mgmt", +] + +[tool.setuptools.package-data] +pytyped = [ + "py.typed", +] + [tool.azure-sdk-build] breaking = false mypy = false pyright = false type_check_samples = false verifytypes = false + +[packaging] +package_name = "azure-mgmt-loganalytics" +package_pprint_name = "Log Analytics 
Management" +package_doc_id = "" +is_stable = true +sample_link = "" +title = "LogAnalyticsManagementClient" +package_nspkg = "azure-mgmt-nspkg" +is_arm = true +need_msrestazure = false +need_azuremgmtcore = true +exclude_folders = "" diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/sdk_packaging.toml b/sdk/loganalytics/azure-mgmt-loganalytics/sdk_packaging.toml deleted file mode 100644 index 6da671ea6af5..000000000000 --- a/sdk/loganalytics/azure-mgmt-loganalytics/sdk_packaging.toml +++ /dev/null @@ -1,7 +0,0 @@ -[packaging] -package_name = "azure-mgmt-loganalytics" -package_pprint_name = "Log Analytics Management" -package_doc_id = "" -is_stable = true -sample_link = "" -title = "LogAnalyticsManagementClient" diff --git a/sdk/loganalytics/azure-mgmt-loganalytics/setup.py b/sdk/loganalytics/azure-mgmt-loganalytics/setup.py deleted file mode 100644 index a670f6068bab..000000000000 --- a/sdk/loganalytics/azure-mgmt-loganalytics/setup.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- - -import re -import os.path -from io import open -from setuptools import find_packages, setup - -# Change the PACKAGE_NAME only to change folder and different name -PACKAGE_NAME = "azure-mgmt-loganalytics" -PACKAGE_PPRINT_NAME = "Log Analytics Management" - -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") -# a-b-c => a.b.c -namespace_name = PACKAGE_NAME.replace("-", ".") - -# Version extraction inspired from 'requests' -with open( - ( - os.path.join(package_folder_path, "version.py") - if os.path.exists(os.path.join(package_folder_path, "version.py")) - else os.path.join(package_folder_path, "_version.py") - ), - "r", -) as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) - -if not version: - raise RuntimeError("Cannot find version information") - -with open("README.md", encoding="utf-8") as f: - readme = f.read() -with open("CHANGELOG.md", encoding="utf-8") as f: - changelog = f.read() - -setup( - name=PACKAGE_NAME, - version=version, - description="Microsoft Azure {} Client Library for Python".format(PACKAGE_PPRINT_NAME), - long_description=readme + "\n\n" + changelog, - long_description_content_type="text/markdown", - license="MIT License", - author="Microsoft Corporation", - author_email="azpysdkhelp@microsoft.com", - url="https://github.com/Azure/azure-sdk-for-python", - keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product - classifiers=[ - "Development Status :: 4 - Beta", - "Programming Language :: Python", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "License :: OSI Approved :: MIT License", - ], - 
zip_safe=False, - packages=find_packages( - exclude=[ - "tests", - # Exclude packages that will be covered by PEP420 or nspkg - "azure", - "azure.mgmt", - ] - ), - include_package_data=True, - package_data={ - "pytyped": ["py.typed"], - }, - install_requires=[ - "isodate>=0.6.1", - "typing-extensions>=4.6.0", - "azure-common>=1.1", - "azure-mgmt-core>=1.3.2", - ], - python_requires=">=3.8", -)