diff --git a/sdk/batch/Azure.Compute.Batch/api/Azure.Compute.Batch.netstandard2.0.cs b/sdk/batch/Azure.Compute.Batch/api/Azure.Compute.Batch.netstandard2.0.cs index 1d2de51fc632a..b7c16a6b6bb62 100644 --- a/sdk/batch/Azure.Compute.Batch/api/Azure.Compute.Batch.netstandard2.0.cs +++ b/sdk/batch/Azure.Compute.Batch/api/Azure.Compute.Batch.netstandard2.0.cs @@ -21,6 +21,7 @@ public partial class AffinityInfo : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AffinityInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -50,6 +51,7 @@ public partial class AuthenticationTokenSettings : System.ClientModel.Primitives { public AuthenticationTokenSettings() { } public System.Collections.Generic.IList Access { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AuthenticationTokenSettings System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AuthenticationTokenSettings System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -63,6 +65,7 @@ public AutomaticOsUpgradePolicy() { } public bool? EnableAutomaticOsUpgrade { get { throw null; } set { } } public bool? OsRollingUpgradeDeferral { get { throw null; } set { } } public bool? 
UseRollingUpgradePolicy { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AutomaticOsUpgradePolicy System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AutomaticOsUpgradePolicy System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -75,6 +78,7 @@ internal AutoScaleRun() { } public Azure.Compute.Batch.AutoScaleRunError Error { get { throw null; } } public string Results { get { throw null; } } public System.DateTimeOffset Timestamp { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AutoScaleRun System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AutoScaleRun System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -87,6 +91,7 @@ internal AutoScaleRunError() { } public string Code { get { throw null; } } public string Message { get { throw null; } } public System.Collections.Generic.IReadOnlyList Values { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AutoScaleRunError System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AutoScaleRunError System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -116,6 +121,7 @@ public partial class AutoUserSpecification : System.ClientModel.Primitives.IJson public AutoUserSpecification() { } public Azure.Compute.Batch.ElevationLevel? ElevationLevel { get { throw null; } set { } } public Azure.Compute.Batch.AutoUserScope? 
Scope { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AutoUserSpecification System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AutoUserSpecification System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -132,6 +138,7 @@ public AzureBlobFileSystemConfiguration(string accountName, string containerName public Azure.Compute.Batch.BatchNodeIdentityReference IdentityReference { get { throw null; } set { } } public string RelativeMountPath { get { throw null; } set { } } public string SasKey { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AzureBlobFileSystemConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AzureBlobFileSystemConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -146,6 +153,7 @@ public AzureFileShareConfiguration(string accountName, string azureFileUrl, stri public string AzureFileUrl { get { throw null; } set { } } public string MountOptions { get { throw null; } set { } } public string RelativeMountPath { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AzureFileShareConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.AzureFileShareConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -158,6 +166,7 @@ internal BatchApplication() { } public string DisplayName { get { throw null; } } public string Id { get { throw null; } } public System.Collections.Generic.IReadOnlyList Versions { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchApplication System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchApplication System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -169,6 +178,7 @@ public partial class BatchApplicationPackageReference : System.ClientModel.Primi public BatchApplicationPackageReference(string applicationId) { } public string ApplicationId { get { throw null; } set { } } public string Version { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchApplicationPackageReference System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchApplicationPackageReference System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -182,6 +192,7 @@ public BatchAutoPoolSpecification(Azure.Compute.Batch.BatchPoolLifetimeOption po public bool? KeepAlive { get { throw null; } set { } } public Azure.Compute.Batch.BatchPoolSpecification Pool { get { throw null; } set { } } public Azure.Compute.Batch.BatchPoolLifetimeOption PoolLifetimeOption { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchAutoPoolSpecification System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchAutoPoolSpecification System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -220,10 +231,14 @@ public BatchClient(System.Uri endpoint, Azure.Core.TokenCredential credential, A public virtual Azure.Response CreateTaskCollection(string jobId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task> CreateTaskCollectionAsync(string jobId, Azure.Compute.Batch.BatchTaskGroup taskCollection, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task CreateTaskCollectionAsync(string jobId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } - public virtual Azure.Response DeleteJob(string jobId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? 
ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } - public virtual System.Threading.Tasks.Task DeleteJobAsync(string jobId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } - public virtual Azure.Response DeleteJobSchedule(string jobScheduleId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } - public virtual System.Threading.Tasks.Task DeleteJobScheduleAsync(string jobScheduleId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response DeallocateNode(string poolId, string nodeId, Azure.Compute.Batch.BatchNodeDeallocateContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response DeallocateNode(string poolId, string nodeId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task DeallocateNodeAsync(string poolId, string nodeId, Azure.Compute.Batch.BatchNodeDeallocateContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task DeallocateNodeAsync(string poolId, string nodeId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response DeleteJob(string jobId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? force = default(bool?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task DeleteJobAsync(string jobId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? force = default(bool?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response DeleteJobSchedule(string jobScheduleId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? force = default(bool?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task DeleteJobScheduleAsync(string jobScheduleId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? 
force = default(bool?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } public virtual Azure.Response DeleteNodeFile(string poolId, string nodeId, string filePath, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? recursive = default(bool?), Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task DeleteNodeFileAsync(string poolId, string nodeId, string filePath, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? recursive = default(bool?), Azure.RequestContext context = null) { throw null; } public virtual Azure.Response DeleteNodeUser(string poolId, string nodeId, string userName, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } @@ -338,10 +353,10 @@ public BatchClient(System.Uri endpoint, Azure.Core.TokenCredential credential, A public virtual Azure.Pageable GetPools(int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), int? maxresults = default(int?), string filter = null, System.Collections.Generic.IEnumerable select = null, System.Collections.Generic.IEnumerable expand = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.AsyncPageable GetPoolsAsync(int? timeOutInSeconds, System.DateTimeOffset? ocpdate, int? maxresults, string filter, System.Collections.Generic.IEnumerable select, System.Collections.Generic.IEnumerable expand, Azure.RequestContext context) { throw null; } public virtual Azure.AsyncPageable GetPoolsAsync(int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), int? maxresults = default(int?), string filter = null, System.Collections.Generic.IEnumerable select = null, System.Collections.Generic.IEnumerable expand = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Pageable GetPoolUsageMetrics(int? timeOutInSeconds, System.DateTimeOffset? ocpdate, int? maxresults, System.DateTimeOffset? starttime, System.DateTimeOffset? endtime, string filter, Azure.RequestContext context) { throw null; } - public virtual Azure.Pageable GetPoolUsageMetrics(int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), int? maxresults = default(int?), System.DateTimeOffset? starttime = default(System.DateTimeOffset?), System.DateTimeOffset? endtime = default(System.DateTimeOffset?), string filter = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.AsyncPageable GetPoolUsageMetricsAsync(int? timeOutInSeconds, System.DateTimeOffset? ocpdate, int? maxresults, System.DateTimeOffset? starttime, System.DateTimeOffset? endtime, string filter, Azure.RequestContext context) { throw null; } - public virtual Azure.AsyncPageable GetPoolUsageMetricsAsync(int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), int? maxresults = default(int?), System.DateTimeOffset? starttime = default(System.DateTimeOffset?), System.DateTimeOffset? 
endtime = default(System.DateTimeOffset?), string filter = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Pageable GetPoolUsageMetrics(int? timeOutInSeconds, System.DateTimeOffset? ocpdate, int? maxresults, System.DateTimeOffset? startTime, System.DateTimeOffset? endtime, string filter, Azure.RequestContext context) { throw null; } + public virtual Azure.Pageable GetPoolUsageMetrics(int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), int? maxresults = default(int?), System.DateTimeOffset? startTime = default(System.DateTimeOffset?), System.DateTimeOffset? endtime = default(System.DateTimeOffset?), string filter = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.AsyncPageable GetPoolUsageMetricsAsync(int? timeOutInSeconds, System.DateTimeOffset? ocpdate, int? maxresults, System.DateTimeOffset? startTime, System.DateTimeOffset? endtime, string filter, Azure.RequestContext context) { throw null; } + public virtual Azure.AsyncPageable GetPoolUsageMetricsAsync(int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), int? maxresults = default(int?), System.DateTimeOffset? startTime = default(System.DateTimeOffset?), System.DateTimeOffset? endtime = default(System.DateTimeOffset?), string filter = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Pageable GetSubTasks(string jobId, string taskId, int? timeOutInSeconds, System.DateTimeOffset? ocpdate, System.Collections.Generic.IEnumerable select, Azure.RequestContext context) { throw null; } public virtual Azure.Pageable GetSubTasks(string jobId, string taskId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), System.Collections.Generic.IEnumerable select = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.AsyncPageable GetSubTasksAsync(string jobId, string taskId, int? timeOutInSeconds, System.DateTimeOffset? ocpdate, System.Collections.Generic.IEnumerable select, Azure.RequestContext context) { throw null; } @@ -378,6 +393,10 @@ public BatchClient(System.Uri endpoint, Azure.Core.TokenCredential credential, A public virtual Azure.Response RebootNode(string poolId, string nodeId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task RebootNodeAsync(string poolId, string nodeId, Azure.Compute.Batch.BatchNodeRebootContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task RebootNodeAsync(string poolId, string nodeId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? 
ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response ReimageNode(string poolId, string nodeId, Azure.Compute.Batch.BatchNodeReimageContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response ReimageNode(string poolId, string nodeId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task ReimageNodeAsync(string poolId, string nodeId, Azure.Compute.Batch.BatchNodeReimageContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task ReimageNodeAsync(string poolId, string nodeId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } public virtual Azure.Response RemoveNodes(string poolId, Azure.Compute.Batch.BatchNodeRemoveContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response RemoveNodes(string poolId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task RemoveNodesAsync(string poolId, Azure.Compute.Batch.BatchNodeRemoveContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } @@ -406,14 +425,16 @@ public BatchClient(System.Uri endpoint, Azure.Core.TokenCredential credential, A public virtual Azure.Response ResizePool(string poolId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task ResizePoolAsync(string poolId, Azure.Compute.Batch.BatchPoolResizeContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task ResizePoolAsync(string poolId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? 
ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response StartNode(string poolId, string nodeId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task StartNodeAsync(string poolId, string nodeId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestContext context = null) { throw null; } public virtual Azure.Response StopPoolResize(string poolId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task StopPoolResizeAsync(string poolId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } - public virtual Azure.Response TerminateJob(string jobId, Azure.Compute.Batch.BatchJobTerminateContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Response TerminateJob(string jobId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } - public virtual System.Threading.Tasks.Task TerminateJobAsync(string jobId, Azure.Compute.Batch.BatchJobTerminateContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual System.Threading.Tasks.Task TerminateJobAsync(string jobId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } - public virtual Azure.Response TerminateJobSchedule(string jobScheduleId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } - public virtual System.Threading.Tasks.Task TerminateJobScheduleAsync(string jobScheduleId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response TerminateJob(string jobId, Azure.Compute.Batch.BatchJobTerminateContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? 
force = default(bool?), Azure.RequestConditions requestConditions = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response TerminateJob(string jobId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? force = default(bool?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task TerminateJobAsync(string jobId, Azure.Compute.Batch.BatchJobTerminateContent parameters = null, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? force = default(bool?), Azure.RequestConditions requestConditions = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task TerminateJobAsync(string jobId, Azure.Core.RequestContent content, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? force = default(bool?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response TerminateJobSchedule(string jobScheduleId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? force = default(bool?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task TerminateJobScheduleAsync(string jobScheduleId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), bool? force = default(bool?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } public virtual Azure.Response TerminateTask(string jobId, string taskId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task TerminateTaskAsync(string jobId, string taskId, int? timeOutInSeconds = default(int?), System.DateTimeOffset? ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, Azure.RequestContext context = null) { throw null; } public virtual Azure.Response UpdateJob(string jobId, Azure.Compute.Batch.BatchJobUpdateContent job, int? timeOutInSeconds = default(int?), System.DateTimeOffset? 
ocpdate = default(System.DateTimeOffset?), Azure.RequestConditions requestConditions = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } @@ -435,10 +456,10 @@ public BatchClient(System.Uri endpoint, Azure.Core.TokenCredential credential, A } public partial class BatchClientOptions : Azure.Core.ClientOptions { - public BatchClientOptions(Azure.Compute.Batch.BatchClientOptions.ServiceVersion version = Azure.Compute.Batch.BatchClientOptions.ServiceVersion.V2024_02_01_19_0) { } + public BatchClientOptions(Azure.Compute.Batch.BatchClientOptions.ServiceVersion version = Azure.Compute.Batch.BatchClientOptions.ServiceVersion.V2024_07_01_20_0) { } public enum ServiceVersion { - V2024_02_01_19_0 = 1, + V2024_07_01_20_0 = 1, } } public partial class BatchError : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel @@ -447,6 +468,7 @@ internal BatchError() { } public string Code { get { throw null; } } public Azure.Compute.Batch.BatchErrorMessage Message { get { throw null; } } public System.Collections.Generic.IReadOnlyList Values { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchError System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchError System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -458,6 +480,7 @@ public partial class BatchErrorDetail : System.ClientModel.Primitives.IJsonModel internal BatchErrorDetail() { } public string Key { get { throw null; } } public string Value { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchErrorDetail System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchErrorDetail System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -469,6 +492,7 @@ public partial class BatchErrorMessage : System.ClientModel.Primitives.IJsonMode internal BatchErrorMessage() { } public string Lang { get { throw null; } } public string Value { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchErrorMessage System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } 
Azure.Compute.Batch.BatchErrorMessage System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -512,6 +536,7 @@ public BatchJob(Azure.Compute.Batch.BatchPoolInfo poolInfo) { } public Azure.Compute.Batch.BatchJobStatistics Stats { get { throw null; } } public string Url { get { throw null; } } public bool? UsesTaskDependencies { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJob System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJob System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -542,6 +567,7 @@ public partial class BatchJobConstraints : System.ClientModel.Primitives.IJsonMo public BatchJobConstraints() { } public int? MaxTaskRetryCount { get { throw null; } set { } } public System.TimeSpan? MaxWallClockTime { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobConstraints System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobConstraints System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -567,6 +593,7 @@ public BatchJobCreateContent(string id, Azure.Compute.Batch.BatchPoolInfo poolIn public Azure.Compute.Batch.BatchPoolInfo PoolInfo { get { throw null; } } public int? Priority { get { throw null; } set { } } public bool? 
UsesTaskDependencies { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobCreateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobCreateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -577,6 +604,7 @@ public partial class BatchJobDisableContent : System.ClientModel.Primitives.IJso { public BatchJobDisableContent(Azure.Compute.Batch.DisableBatchJobOption disableTasks) { } public Azure.Compute.Batch.DisableBatchJobOption DisableTasks { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobDisableContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobDisableContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -591,6 +619,7 @@ internal BatchJobExecutionInfo() { } public Azure.Compute.Batch.BatchJobSchedulingError SchedulingError { get { throw null; } } public System.DateTimeOffset StartTime { get { throw null; } } public string TerminationReason { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobExecutionInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobExecutionInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -615,6 +644,7 @@ public BatchJobManagerTask(string id, string commandLine) { } public System.Collections.Generic.IList ResourceFiles { get { throw null; } } public bool? 
RunExclusive { get { throw null; } set { } } public Azure.Compute.Batch.UserIdentity UserIdentity { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobManagerTask System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobManagerTask System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -623,8 +653,10 @@ public BatchJobManagerTask(string id, string commandLine) { } } public partial class BatchJobNetworkConfiguration : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public BatchJobNetworkConfiguration(string subnetId) { } + public BatchJobNetworkConfiguration(string subnetId, bool skipWithdrawFromVNet) { } + public bool SkipWithdrawFromVNet { get { throw null; } set { } } public string SubnetId { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobNetworkConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobNetworkConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -639,6 +671,7 @@ internal BatchJobPreparationAndReleaseTaskStatus() { } public string NodeId { get { throw null; } } public string NodeUrl { get { throw null; } } public string PoolId { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobPreparationAndReleaseTaskStatus System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobPreparationAndReleaseTaskStatus System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -657,6 +690,7 @@ public BatchJobPreparationTask(string commandLine) { } public System.Collections.Generic.IList ResourceFiles { get { throw null; } } public Azure.Compute.Batch.UserIdentity UserIdentity { get { throw null; } set { } } public bool? 
WaitForSuccess { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobPreparationTask System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobPreparationTask System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -677,6 +711,7 @@ internal BatchJobPreparationTaskExecutionInfo() { } public Azure.Compute.Batch.BatchJobPreparationTaskState State { get { throw null; } } public string TaskRootDirectory { get { throw null; } } public string TaskRootDirectoryUrl { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobPreparationTaskExecutionInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobPreparationTaskExecutionInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -712,6 +747,7 @@ public BatchJobReleaseTask(string commandLine) { } public System.Collections.Generic.IList ResourceFiles { get { throw null; } } public System.TimeSpan? 
RetentionTime { get { throw null; } set { } } public Azure.Compute.Batch.UserIdentity UserIdentity { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobReleaseTask System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobReleaseTask System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -730,6 +766,7 @@ internal BatchJobReleaseTaskExecutionInfo() { } public Azure.Compute.Batch.BatchJobReleaseTaskState State { get { throw null; } } public string TaskRootDirectory { get { throw null; } } public string TaskRootDirectoryUrl { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobReleaseTaskExecutionInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobReleaseTaskExecutionInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -772,6 +809,7 @@ public BatchJobSchedule(Azure.Compute.Batch.BatchJobSpecification jobSpecificati public System.DateTimeOffset? StateTransitionTime { get { throw null; } } public Azure.Compute.Batch.BatchJobScheduleStatistics Stats { get { throw null; } } public string Url { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobSchedule System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobSchedule System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -785,6 +823,7 @@ public BatchJobScheduleConfiguration() { } public System.DateTimeOffset? DoNotRunUntil { get { throw null; } set { } } public System.TimeSpan? RecurrenceInterval { get { throw null; } set { } } public System.TimeSpan? 
StartWindow { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -799,6 +838,7 @@ public BatchJobScheduleCreateContent(string id, Azure.Compute.Batch.BatchJobSche public Azure.Compute.Batch.BatchJobSpecification JobSpecification { get { throw null; } } public System.Collections.Generic.IList Metadata { get { throw null; } } public Azure.Compute.Batch.BatchJobScheduleConfiguration Schedule { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleCreateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleCreateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -811,6 +851,7 @@ internal BatchJobScheduleExecutionInfo() { } public System.DateTimeOffset? EndTime { get { throw null; } } public System.DateTimeOffset? 
NextRunTime { get { throw null; } } public Azure.Compute.Batch.RecentBatchJob RecentJob { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleExecutionInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleExecutionInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -855,6 +896,7 @@ internal BatchJobScheduleStatistics() { } public System.TimeSpan WallClockTime { get { throw null; } } public float WriteIOGiB { get { throw null; } } public long WriteIOps { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleStatistics System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleStatistics System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -867,6 +909,7 @@ public BatchJobScheduleUpdateContent() { } public Azure.Compute.Batch.BatchJobSpecification JobSpecification { get { throw null; } set { } } public System.Collections.Generic.IList Metadata { get { throw null; } } public Azure.Compute.Batch.BatchJobScheduleConfiguration Schedule { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleUpdateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobScheduleUpdateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -880,6 +923,7 @@ internal BatchJobSchedulingError() { } public string Code { get { throw null; } } public System.Collections.Generic.IReadOnlyList Details { get { throw null; } } public string Message { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobSchedulingError System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void 
System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobSchedulingError System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -904,6 +948,7 @@ public BatchJobSpecification(Azure.Compute.Batch.BatchPoolInfo poolInfo) { } public Azure.Compute.Batch.BatchPoolInfo PoolInfo { get { throw null; } set { } } public int? Priority { get { throw null; } set { } } public bool? UsesTaskDependencies { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobSpecification System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobSpecification System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -950,6 +995,7 @@ internal BatchJobStatistics() { } public System.TimeSpan WallClockTime { get { throw null; } } public float WriteIOGiB { get { throw null; } } public long WriteIOps { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobStatistics System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobStatistics System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -960,6 +1006,7 @@ public partial class BatchJobTerminateContent : System.ClientModel.Primitives.IJ { public BatchJobTerminateContent() { } public string TerminationReason { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobTerminateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobTerminateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -973,9 +1020,11 @@ public BatchJobUpdateContent() { } public Azure.Compute.Batch.BatchJobConstraints Constraints { get { throw null; } set { } } public int? 
MaxParallelTasks { get { throw null; } set { } } public System.Collections.Generic.IList Metadata { get { throw null; } } + public Azure.Compute.Batch.BatchJobNetworkConfiguration NetworkConfiguration { get { throw null; } set { } } public Azure.Compute.Batch.OnAllBatchTasksComplete? OnAllTasksComplete { get { throw null; } set { } } public Azure.Compute.Batch.BatchPoolInfo PoolInfo { get { throw null; } set { } } public int? Priority { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobUpdateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchJobUpdateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1007,6 +1056,7 @@ internal BatchNode() { } public string Url { get { throw null; } } public Azure.Compute.Batch.VirtualMachineInfo VirtualMachineInfo { get { throw null; } } public string VmSize { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNode System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNode System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1018,6 +1068,7 @@ public partial class BatchNodeAgentInfo : System.ClientModel.Primitives.IJsonMod internal BatchNodeAgentInfo() { } public System.DateTimeOffset LastUpdateTime { get { throw null; } } public string Version { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeAgentInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeAgentInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1047,6 +1098,8 @@ public partial class BatchNodeCounts : System.ClientModel.Primitives.IJsonModel< { internal BatchNodeCounts() { } public int Creating { get { throw null; } } + public int Deallocated { get { throw null; } } + public int Deallocating { get { throw null; } } public int Idle { get { throw null; } } public int LeavingPool { get { throw null; } } public int Offline { get { throw null; } } @@ -1061,12 +1114,44 @@ internal 
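// Illustrative sketch (assumption, not part of this diff): building a job update payload with the
// newly added settable NetworkConfiguration on BatchJobUpdateContent. Only members shown in the
// listing above are used; `jobNetworkConfiguration` is an assumed, pre-built
// Azure.Compute.Batch.BatchJobNetworkConfiguration instance, and the client call that submits the
// payload is not part of this API listing.
var jobUpdate = new Azure.Compute.Batch.BatchJobUpdateContent
{
    Priority = 100,                                  // int? (existing settable property)
    MaxParallelTasks = 4,                            // int? (existing settable property)
    NetworkConfiguration = jobNetworkConfiguration,  // new settable property in this diff
};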
BatchNodeCounts() { } public int Unusable { get { throw null; } } public int UpgradingOs { get { throw null; } } public int WaitingForStartTask { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeCounts System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeCounts System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public partial class BatchNodeDeallocateContent : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public BatchNodeDeallocateContent() { } + public Azure.Compute.Batch.BatchNodeDeallocateOption? NodeDeallocateOption { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.Compute.Batch.BatchNodeDeallocateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.Compute.Batch.BatchNodeDeallocateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct BatchNodeDeallocateOption : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public BatchNodeDeallocateOption(string value) { throw null; } + public static Azure.Compute.Batch.BatchNodeDeallocateOption Requeue { get { throw null; } } + public static Azure.Compute.Batch.BatchNodeDeallocateOption RetainedData { get { throw null; } } + public static Azure.Compute.Batch.BatchNodeDeallocateOption TaskCompletion { get { throw null; } } + public static Azure.Compute.Batch.BatchNodeDeallocateOption Terminate { get { throw null; } } + public bool Equals(Azure.Compute.Batch.BatchNodeDeallocateOption other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + 
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.Compute.Batch.BatchNodeDeallocateOption left, Azure.Compute.Batch.BatchNodeDeallocateOption right) { throw null; } + public static implicit operator Azure.Compute.Batch.BatchNodeDeallocateOption (string value) { throw null; } + public static bool operator !=(Azure.Compute.Batch.BatchNodeDeallocateOption left, Azure.Compute.Batch.BatchNodeDeallocateOption right) { throw null; } + public override string ToString() { throw null; } + } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] public readonly partial struct BatchNodeDeallocationOption : System.IEquatable { @@ -1091,6 +1176,7 @@ public partial class BatchNodeDisableSchedulingContent : System.ClientModel.Prim { public BatchNodeDisableSchedulingContent() { } public Azure.Compute.Batch.BatchNodeDisableSchedulingOption? NodeDisableSchedulingOption { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeDisableSchedulingContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeDisableSchedulingContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1120,6 +1206,7 @@ public partial class BatchNodeEndpointConfiguration : System.ClientModel.Primiti { internal BatchNodeEndpointConfiguration() { } public System.Collections.Generic.IReadOnlyList InboundEndpoints { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeEndpointConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeEndpointConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1132,6 +1219,7 @@ internal BatchNodeError() { } public string Code { get { throw null; } } public System.Collections.Generic.IReadOnlyList ErrorDetails { get { throw null; } } public string Message { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeError System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, 
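// Illustrative sketch (assumption, not part of this diff): constructing the new
// BatchNodeDeallocateContent with one of the BatchNodeDeallocateOption values added above
// (Requeue, RetainedData, TaskCompletion, Terminate). The service operation that consumes this
// payload is not shown in this listing, so only the model construction is sketched.
var deallocateContent = new Azure.Compute.Batch.BatchNodeDeallocateContent
{
    // Wait for currently running tasks to complete before the node is deallocated.
    NodeDeallocateOption = Azure.Compute.Batch.BatchNodeDeallocateOption.TaskCompletion,
};
// Per the BatchNodeState additions elsewhere in this diff, such a node then reports the new
// Deallocating and Deallocated states.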
System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeError System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1145,6 +1233,7 @@ internal BatchNodeFile() { } public string Name { get { throw null; } } public Azure.Compute.Batch.FileProperties Properties { get { throw null; } } public string Url { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeFile System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeFile System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1173,6 +1262,7 @@ public partial class BatchNodeIdentityReference : System.ClientModel.Primitives. { public BatchNodeIdentityReference() { } public string ResourceId { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeIdentityReference System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeIdentityReference System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1188,6 +1278,7 @@ internal BatchNodeInfo() { } public string PoolId { get { throw null; } } public string TaskRootDirectory { get { throw null; } } public string TaskRootDirectoryUrl { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1198,6 +1289,7 @@ public partial class BatchNodePlacementConfiguration : System.ClientModel.Primit { public BatchNodePlacementConfiguration() { } public Azure.Compute.Batch.BatchNodePlacementPolicyType? 
Policy { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodePlacementConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodePlacementConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1226,6 +1318,7 @@ public partial class BatchNodeRebootContent : System.ClientModel.Primitives.IJso { public BatchNodeRebootContent() { } public Azure.Compute.Batch.BatchNodeRebootOption? NodeRebootOption { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeRebootContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeRebootContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1252,11 +1345,43 @@ public BatchNodeRebootContent() { } public static bool operator !=(Azure.Compute.Batch.BatchNodeRebootOption left, Azure.Compute.Batch.BatchNodeRebootOption right) { throw null; } public override string ToString() { throw null; } } + public partial class BatchNodeReimageContent : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public BatchNodeReimageContent() { } + public Azure.Compute.Batch.BatchNodeReimageOption? 
NodeReimageOption { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.Compute.Batch.BatchNodeReimageContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.Compute.Batch.BatchNodeReimageContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct BatchNodeReimageOption : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public BatchNodeReimageOption(string value) { throw null; } + public static Azure.Compute.Batch.BatchNodeReimageOption Requeue { get { throw null; } } + public static Azure.Compute.Batch.BatchNodeReimageOption RetainedData { get { throw null; } } + public static Azure.Compute.Batch.BatchNodeReimageOption TaskCompletion { get { throw null; } } + public static Azure.Compute.Batch.BatchNodeReimageOption Terminate { get { throw null; } } + public bool Equals(Azure.Compute.Batch.BatchNodeReimageOption other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.Compute.Batch.BatchNodeReimageOption left, Azure.Compute.Batch.BatchNodeReimageOption right) { throw null; } + public static implicit operator Azure.Compute.Batch.BatchNodeReimageOption (string value) { throw null; } + public static bool operator !=(Azure.Compute.Batch.BatchNodeReimageOption left, Azure.Compute.Batch.BatchNodeReimageOption right) { throw null; } + public override string ToString() { throw null; } + } public partial class BatchNodeRemoteLoginSettings : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal BatchNodeRemoteLoginSettings() { } public string RemoteLoginIpAddress { get { throw null; } } public int RemoteLoginPort { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeRemoteLoginSettings System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeRemoteLoginSettings 
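// Illustrative sketch (assumption, not part of this diff): the new BatchNodeReimageContent mirrors
// BatchNodeRebootContent and BatchNodeDeallocateContent. Only option values listed above
// (Requeue, RetainedData, TaskCompletion, Terminate) are used; the client operation that sends
// this payload is outside this API listing.
var reimageContent = new Azure.Compute.Batch.BatchNodeReimageContent
{
    // Requeue running tasks and reimage the node as soon as possible.
    NodeReimageOption = Azure.Compute.Batch.BatchNodeReimageOption.Requeue,
};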
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1269,6 +1394,7 @@ public BatchNodeRemoveContent(System.Collections.Generic.IEnumerable nod public Azure.Compute.Batch.BatchNodeDeallocationOption? NodeDeallocationOption { get { throw null; } set { } } public System.Collections.Generic.IList NodeList { get { throw null; } } public System.TimeSpan? ResizeTimeout { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeRemoveContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeRemoveContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1282,6 +1408,8 @@ public BatchNodeRemoveContent(System.Collections.Generic.IEnumerable nod private readonly int _dummyPrimitive; public BatchNodeState(string value) { throw null; } public static Azure.Compute.Batch.BatchNodeState Creating { get { throw null; } } + public static Azure.Compute.Batch.BatchNodeState Deallocated { get { throw null; } } + public static Azure.Compute.Batch.BatchNodeState Deallocating { get { throw null; } } public static Azure.Compute.Batch.BatchNodeState Idle { get { throw null; } } public static Azure.Compute.Batch.BatchNodeState LeavingPool { get { throw null; } } public static Azure.Compute.Batch.BatchNodeState Offline { get { throw null; } } @@ -1313,6 +1441,7 @@ public BatchNodeUserCreateContent(string name) { } public string Name { get { throw null; } } public string Password { get { throw null; } set { } } public string SshPublicKey { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeUserCreateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeUserCreateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1325,6 +1454,7 @@ public BatchNodeUserUpdateContent() { } public System.DateTimeOffset? 
ExpiryTime { get { throw null; } set { } } public string Password { get { throw null; } set { } } public string SshPublicKey { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeUserUpdateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeUserUpdateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1337,6 +1467,7 @@ internal BatchNodeVMExtension() { } public Azure.Compute.Batch.VMExtensionInstanceView InstanceView { get { throw null; } } public string ProvisioningState { get { throw null; } } public Azure.Compute.Batch.VMExtension VmExtension { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeVMExtension System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchNodeVMExtension System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1383,6 +1514,7 @@ internal BatchPool() { } public System.Collections.Generic.IReadOnlyList UserAccounts { get { throw null; } } public Azure.Compute.Batch.VirtualMachineConfiguration VirtualMachineConfiguration { get { throw null; } } public string VmSize { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPool System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPool System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1414,6 +1546,7 @@ public BatchPoolCreateContent(string id, string vmSize) { } public System.Collections.Generic.IList UserAccounts { get { throw null; } } public Azure.Compute.Batch.VirtualMachineConfiguration VirtualMachineConfiguration { get { throw null; } set { } } public string VmSize { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolCreateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { 
throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolCreateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1425,6 +1558,7 @@ public partial class BatchPoolEnableAutoScaleContent : System.ClientModel.Primit public BatchPoolEnableAutoScaleContent() { } public System.TimeSpan? AutoScaleEvaluationInterval { get { throw null; } set { } } public string AutoScaleFormula { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolEnableAutoScaleContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolEnableAutoScaleContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1435,6 +1569,7 @@ public partial class BatchPoolEndpointConfiguration : System.ClientModel.Primiti { public BatchPoolEndpointConfiguration(System.Collections.Generic.IEnumerable inboundNatPools) { } public System.Collections.Generic.IList InboundNatPools { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolEndpointConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolEndpointConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1445,6 +1580,7 @@ public partial class BatchPoolEvaluateAutoScaleContent : System.ClientModel.Prim { public BatchPoolEvaluateAutoScaleContent(string autoScaleFormula) { } public string AutoScaleFormula { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolEvaluateAutoScaleContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolEvaluateAutoScaleContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1456,6 +1592,7 @@ public partial class BatchPoolIdentity : System.ClientModel.Primitives.IJsonMode internal 
BatchPoolIdentity() { } public Azure.Compute.Batch.BatchPoolIdentityType Type { get { throw null; } } public System.Collections.Generic.IReadOnlyList UserAssignedIdentities { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolIdentity System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolIdentity System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1485,6 +1622,7 @@ public partial class BatchPoolInfo : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1515,6 +1653,7 @@ internal BatchPoolNodeCounts() { } public Azure.Compute.Batch.BatchNodeCounts Dedicated { get { throw null; } } public Azure.Compute.Batch.BatchNodeCounts LowPriority { get { throw null; } } public string PoolId { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolNodeCounts System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolNodeCounts System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1529,6 +1668,7 @@ public BatchPoolReplaceContent(System.Collections.Generic.IEnumerable.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolReplaceContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } Azure.Compute.Batch.BatchPoolReplaceContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1541,6 +1681,7 @@ public BatchPoolResizeContent() { } public System.TimeSpan? ResizeTimeout { get { throw null; } set { } } public int? 
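// Illustrative sketch (assumption, not part of this diff): reading the Deallocated and
// Deallocating counters newly added to BatchNodeCounts, via the Dedicated and LowPriority
// groupings on BatchPoolNodeCounts shown above. `poolNodeCounts` is an assumed
// Azure.Compute.Batch.BatchPoolNodeCounts instance returned by a node-counts query.
int deallocated  = poolNodeCounts.Dedicated.Deallocated  + poolNodeCounts.LowPriority.Deallocated;
int deallocating = poolNodeCounts.Dedicated.Deallocating + poolNodeCounts.LowPriority.Deallocating;
System.Console.WriteLine($"{poolNodeCounts.PoolId}: {deallocating} deallocating, {deallocated} deallocated");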
TargetDedicatedNodes { get { throw null; } set { } } public int? TargetLowPriorityNodes { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolResizeContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolResizeContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1563,6 +1704,7 @@ internal BatchPoolResourceStatistics() { } public float PeakDiskGiB { get { throw null; } } public float PeakMemoryGiB { get { throw null; } } public System.DateTimeOffset StartTime { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolResourceStatistics System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolResourceStatistics System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1593,6 +1735,7 @@ public BatchPoolSpecification(string vmSize) { } public System.Collections.Generic.IList UserAccounts { get { throw null; } } public Azure.Compute.Batch.VirtualMachineConfiguration VirtualMachineConfiguration { get { throw null; } set { } } public string VmSize { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolSpecification System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolSpecification System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1625,6 +1768,7 @@ internal BatchPoolStatistics() { } public System.DateTimeOffset StartTime { get { throw null; } } public string Url { get { throw null; } } public Azure.Compute.Batch.BatchPoolUsageStatistics UsageStats { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolStatistics System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void 
System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolStatistics System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1635,9 +1779,21 @@ public partial class BatchPoolUpdateContent : System.ClientModel.Primitives.IJso { public BatchPoolUpdateContent() { } public System.Collections.Generic.IList ApplicationPackageReferences { get { throw null; } } + public string DisplayName { get { throw null; } set { } } + public bool? EnableInterNodeCommunication { get { throw null; } set { } } public System.Collections.Generic.IList Metadata { get { throw null; } } + public System.Collections.Generic.IList MountConfiguration { get { throw null; } } + public Azure.Compute.Batch.NetworkConfiguration NetworkConfiguration { get { throw null; } set { } } + public System.Collections.Generic.IDictionary ResourceTags { get { throw null; } } public Azure.Compute.Batch.BatchStartTask StartTask { get { throw null; } set { } } public Azure.Compute.Batch.BatchNodeCommunicationMode? TargetNodeCommunicationMode { get { throw null; } set { } } + public Azure.Compute.Batch.BatchTaskSchedulingPolicy TaskSchedulingPolicy { get { throw null; } set { } } + public int? TaskSlotsPerNode { get { throw null; } set { } } + public Azure.Compute.Batch.UpgradePolicy UpgradePolicy { get { throw null; } set { } } + public System.Collections.Generic.IList UserAccounts { get { throw null; } } + public Azure.Compute.Batch.VirtualMachineConfiguration VirtualMachineConfiguration { get { throw null; } set { } } + public string VmSize { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolUpdateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolUpdateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1652,6 +1808,7 @@ internal BatchPoolUsageMetrics() { } public System.DateTimeOffset StartTime { get { throw null; } } public float TotalCoreHours { get { throw null; } } public string VmSize { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolUsageMetrics System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolUsageMetrics System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1664,6 +1821,7 @@ internal BatchPoolUsageStatistics() { } public System.TimeSpan 
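// Illustrative sketch (assumption, not part of this diff): BatchPoolUpdateContent now exposes much
// more of the pool shape (DisplayName, EnableInterNodeCommunication, MountConfiguration,
// NetworkConfiguration, ResourceTags, TaskSchedulingPolicy, TaskSlotsPerNode, UpgradePolicy,
// UserAccounts, VirtualMachineConfiguration, VmSize). Only properties listed above are set here;
// the values are made up, and how the payload is submitted is not part of this listing.
var poolUpdate = new Azure.Compute.Batch.BatchPoolUpdateContent
{
    DisplayName = "nightly-render-pool",   // new settable string property
    VmSize = "standard_d4s_v5",            // new settable string property
    TaskSlotsPerNode = 4,                  // new settable int? property
    EnableInterNodeCommunication = false,  // new settable bool? property
};
// ResourceTags is a new read-only dictionary property; string keys/values are assumed, since the
// generic arguments are not visible in this flattened listing.
poolUpdate.ResourceTags["costCenter"] = "batch-ci";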
DedicatedCoreTime { get { throw null; } } public System.DateTimeOffset LastUpdateTime { get { throw null; } } public System.DateTimeOffset StartTime { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolUsageStatistics System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchPoolUsageStatistics System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1680,6 +1838,7 @@ public BatchStartTask(string commandLine) { } public System.Collections.Generic.IList ResourceFiles { get { throw null; } } public Azure.Compute.Batch.UserIdentity UserIdentity { get { throw null; } set { } } public bool? WaitForSuccess { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchStartTask System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchStartTask System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1698,6 +1857,7 @@ internal BatchStartTaskInfo() { } public int RetryCount { get { throw null; } } public System.DateTimeOffset StartTime { get { throw null; } } public Azure.Compute.Batch.BatchStartTaskState State { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchStartTaskInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchStartTaskInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1737,6 +1897,7 @@ internal BatchSubtask() { } public System.DateTimeOffset? StartTime { get { throw null; } } public Azure.Compute.Batch.BatchSubtaskState? State { get { throw null; } } public System.DateTimeOffset? 
StateTransitionTime { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchSubtask System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchSubtask System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1771,6 +1932,7 @@ internal BatchSupportedImage() { } public string NodeAgentSkuId { get { throw null; } } public Azure.Compute.Batch.OSType OsType { get { throw null; } } public Azure.Compute.Batch.ImageVerificationType VerificationType { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchSupportedImage System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchSupportedImage System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1807,6 +1969,7 @@ public BatchTask() { } public Azure.Compute.Batch.BatchTaskStatistics Stats { get { throw null; } } public string Url { get { throw null; } } public Azure.Compute.Batch.UserIdentity UserIdentity { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTask System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTask System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1817,6 +1980,7 @@ public partial class BatchTaskAddCollectionResult : System.ClientModel.Primitive { internal BatchTaskAddCollectionResult() { } public System.Collections.Generic.IReadOnlyList Value { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskAddCollectionResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskAddCollectionResult 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1832,6 +1996,7 @@ internal BatchTaskAddResult() { } public string Location { get { throw null; } } public Azure.Compute.Batch.BatchTaskAddStatus Status { get { throw null; } } public string TaskId { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskAddResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskAddResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1863,6 +2028,7 @@ public BatchTaskConstraints() { } public int? MaxTaskRetryCount { get { throw null; } set { } } public System.TimeSpan? MaxWallClockTime { get { throw null; } set { } } public System.TimeSpan? RetentionTime { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskConstraints System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskConstraints System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1875,6 +2041,7 @@ internal BatchTaskContainerExecutionInfo() { } public string ContainerId { get { throw null; } } public string Error { get { throw null; } } public string State { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskContainerExecutionInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskContainerExecutionInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1884,10 +2051,12 @@ internal BatchTaskContainerExecutionInfo() { } public partial class BatchTaskContainerSettings : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public BatchTaskContainerSettings(string imageName) { } + public System.Collections.Generic.IList ContainerHostBatchBindMounts { get { throw null; } } public string ContainerRunOptions { get { throw null; } set { } } public string ImageName { get { throw null; } set { } } public 
Azure.Compute.Batch.ContainerRegistryReference Registry { get { throw null; } set { } } public Azure.Compute.Batch.ContainerWorkingDirectory? WorkingDirectory { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskContainerSettings System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskContainerSettings System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1902,6 +2071,7 @@ internal BatchTaskCounts() { } public int Failed { get { throw null; } } public int Running { get { throw null; } } public int Succeeded { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskCounts System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskCounts System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1913,6 +2083,7 @@ public partial class BatchTaskCountsResult : System.ClientModel.Primitives.IJson internal BatchTaskCountsResult() { } public Azure.Compute.Batch.BatchTaskCounts TaskCounts { get { throw null; } } public Azure.Compute.Batch.BatchTaskSlotCounts TaskSlotCounts { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskCountsResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskCountsResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1938,6 +2109,7 @@ public BatchTaskCreateContent(string id, string commandLine) { } public int? 
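// Illustrative sketch (assumption, not part of this diff): BatchTaskContainerSettings gains a
// ContainerHostBatchBindMounts collection for host bind mounts. Its element type is not visible in
// this flattened listing, so only the settings object itself is sketched; the image name and run
// options are made-up values.
var containerSettings = new Azure.Compute.Batch.BatchTaskContainerSettings("myregistry.azurecr.io/worker:1.0")
{
    ContainerRunOptions = "--rm --read-only",
};
// containerSettings.ContainerHostBatchBindMounts.Add(...) would populate the new bind-mount list
// once an entry of the appropriate (not shown here) element type is constructed.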
RequiredSlots { get { throw null; } set { } } public System.Collections.Generic.IList ResourceFiles { get { throw null; } } public Azure.Compute.Batch.UserIdentity UserIdentity { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskCreateContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskCreateContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1949,6 +2121,7 @@ public partial class BatchTaskDependencies : System.ClientModel.Primitives.IJson public BatchTaskDependencies() { } public System.Collections.Generic.IList TaskIdRanges { get { throw null; } } public System.Collections.Generic.IList TaskIds { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskDependencies System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskDependencies System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1968,6 +2141,7 @@ internal BatchTaskExecutionInfo() { } public Azure.Compute.Batch.BatchTaskExecutionResult? Result { get { throw null; } } public int RetryCount { get { throw null; } } public System.DateTimeOffset? 
StartTime { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskExecutionInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskExecutionInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1999,6 +2173,7 @@ internal BatchTaskFailureInfo() { } public string Code { get { throw null; } } public System.Collections.Generic.IReadOnlyList Details { get { throw null; } } public string Message { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskFailureInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskFailureInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2009,6 +2184,7 @@ public partial class BatchTaskGroup : System.ClientModel.Primitives.IJsonModel value) { } public System.Collections.Generic.IList Value { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskGroup System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskGroup System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2020,6 +2196,7 @@ public partial class BatchTaskIdRange : System.ClientModel.Primitives.IJsonModel public BatchTaskIdRange(int start, int end) { } public int End { get { throw null; } set { } } public int Start { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskIdRange System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskIdRange System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions 
options) { throw null; } @@ -2035,6 +2212,7 @@ internal BatchTaskInfo() { } public string TaskId { get { throw null; } } public Azure.Compute.Batch.BatchTaskState TaskState { get { throw null; } } public string TaskUrl { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2045,6 +2223,7 @@ public partial class BatchTaskSchedulingPolicy : System.ClientModel.Primitives.I { public BatchTaskSchedulingPolicy(Azure.Compute.Batch.BatchNodeFillType nodeFillType) { } public Azure.Compute.Batch.BatchNodeFillType NodeFillType { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskSchedulingPolicy System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskSchedulingPolicy System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2059,6 +2238,7 @@ internal BatchTaskSlotCounts() { } public int Failed { get { throw null; } } public int Running { get { throw null; } } public int Succeeded { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskSlotCounts System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskSlotCounts System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2099,6 +2279,7 @@ internal BatchTaskStatistics() { } public System.TimeSpan WallClockTime { get { throw null; } } public float WriteIOGiB { get { throw null; } } public long WriteIOps { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskStatistics System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.BatchTaskStatistics System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2132,6 +2313,7 @@ public CifsMountConfiguration(string username, string source, string relativeMou public string RelativeMountPath { get { throw null; } set { } } public string Source { get { throw null; } set { } } public string Username { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.CifsMountConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.CifsMountConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2160,7 +2342,7 @@ public static partial class ComputeBatchModelFactory public static Azure.Compute.Batch.BatchJobStatistics BatchJobStatistics(string url = null, System.DateTimeOffset startTime = default(System.DateTimeOffset), System.DateTimeOffset lastUpdateTime = default(System.DateTimeOffset), System.TimeSpan userCpuTime = default(System.TimeSpan), System.TimeSpan kernelCpuTime = default(System.TimeSpan), System.TimeSpan wallClockTime = default(System.TimeSpan), long readIOps = (long)0, long writeIOps = (long)0, float readIOGiB = 0f, float writeIOGiB = 0f, long numSucceededTasks = (long)0, long numFailedTasks = (long)0, long numTaskRetries = (long)0, System.TimeSpan waitTime = default(System.TimeSpan)) { throw null; } public static Azure.Compute.Batch.BatchNode BatchNode(string id = null, string url = null, Azure.Compute.Batch.BatchNodeState? state = default(Azure.Compute.Batch.BatchNodeState?), Azure.Compute.Batch.SchedulingState? schedulingState = default(Azure.Compute.Batch.SchedulingState?), System.DateTimeOffset? stateTransitionTime = default(System.DateTimeOffset?), System.DateTimeOffset? lastBootTime = default(System.DateTimeOffset?), System.DateTimeOffset? allocationTime = default(System.DateTimeOffset?), string ipAddress = null, string affinityId = null, string vmSize = null, int? totalTasksRun = default(int?), int? runningTasksCount = default(int?), int? runningTaskSlotsCount = default(int?), int? totalTasksSucceeded = default(int?), System.Collections.Generic.IEnumerable recentTasks = null, Azure.Compute.Batch.BatchStartTask startTask = null, Azure.Compute.Batch.BatchStartTaskInfo startTaskInfo = null, System.Collections.Generic.IEnumerable errors = null, bool? 
isDedicated = default(bool?), Azure.Compute.Batch.BatchNodeEndpointConfiguration endpointConfiguration = null, Azure.Compute.Batch.BatchNodeAgentInfo nodeAgentInfo = null, Azure.Compute.Batch.VirtualMachineInfo virtualMachineInfo = null) { throw null; } public static Azure.Compute.Batch.BatchNodeAgentInfo BatchNodeAgentInfo(string version = null, System.DateTimeOffset lastUpdateTime = default(System.DateTimeOffset)) { throw null; } - public static Azure.Compute.Batch.BatchNodeCounts BatchNodeCounts(int creating = 0, int idle = 0, int offline = 0, int preempted = 0, int rebooting = 0, int reimaging = 0, int running = 0, int starting = 0, int startTaskFailed = 0, int leavingPool = 0, int unknown = 0, int unusable = 0, int waitingForStartTask = 0, int total = 0, int upgradingOs = 0) { throw null; } + public static Azure.Compute.Batch.BatchNodeCounts BatchNodeCounts(int creating = 0, int idle = 0, int offline = 0, int preempted = 0, int rebooting = 0, int reimaging = 0, int running = 0, int starting = 0, int startTaskFailed = 0, int leavingPool = 0, int unknown = 0, int unusable = 0, int waitingForStartTask = 0, int deallocated = 0, int deallocating = 0, int total = 0, int upgradingOs = 0) { throw null; } public static Azure.Compute.Batch.BatchNodeEndpointConfiguration BatchNodeEndpointConfiguration(System.Collections.Generic.IEnumerable inboundEndpoints = null) { throw null; } public static Azure.Compute.Batch.BatchNodeError BatchNodeError(string code = null, string message = null, System.Collections.Generic.IEnumerable errorDetails = null) { throw null; } public static Azure.Compute.Batch.BatchNodeFile BatchNodeFile(string name = null, string url = null, bool? isDirectory = default(bool?), Azure.Compute.Batch.FileProperties properties = null) { throw null; } @@ -2191,8 +2373,8 @@ public static partial class ComputeBatchModelFactory public static Azure.Compute.Batch.BatchTaskInfo BatchTaskInfo(string taskUrl = null, string jobId = null, string taskId = null, int? subtaskId = default(int?), Azure.Compute.Batch.BatchTaskState taskState = default(Azure.Compute.Batch.BatchTaskState), Azure.Compute.Batch.BatchTaskExecutionInfo executionInfo = null) { throw null; } public static Azure.Compute.Batch.BatchTaskSlotCounts BatchTaskSlotCounts(int active = 0, int running = 0, int completed = 0, int succeeded = 0, int failed = 0) { throw null; } public static Azure.Compute.Batch.BatchTaskStatistics BatchTaskStatistics(string url = null, System.DateTimeOffset startTime = default(System.DateTimeOffset), System.DateTimeOffset lastUpdateTime = default(System.DateTimeOffset), System.TimeSpan userCpuTime = default(System.TimeSpan), System.TimeSpan kernelCpuTime = default(System.TimeSpan), System.TimeSpan wallClockTime = default(System.TimeSpan), long readIOps = (long)0, long writeIOps = (long)0, float readIOGiB = 0f, float writeIOGiB = 0f, System.TimeSpan waitTime = default(System.TimeSpan)) { throw null; } - public static Azure.Compute.Batch.FileProperties FileProperties(System.DateTimeOffset? 
creationTime = default(System.DateTimeOffset?), System.DateTimeOffset lastModified = default(System.DateTimeOffset), long contentLength = (long)0, string contentType = null, string fileMode = null) { throw null; } - public static Azure.Compute.Batch.ImageReference ImageReference(string publisher = null, string offer = null, string sku = null, string version = null, string virtualMachineImageId = null, string exactVersion = null) { throw null; } + public static Azure.Compute.Batch.FileProperties FileProperties(System.DateTimeOffset? creationTime = default(System.DateTimeOffset?), System.DateTimeOffset lastModified = default(System.DateTimeOffset), string contentLength = null, string contentType = null, string fileMode = null) { throw null; } + public static Azure.Compute.Batch.ImageReference ImageReference(string publisher = null, string offer = null, string sku = null, string version = null, string virtualMachineImageId = null, string exactVersion = null, string sharedGalleryImageId = null, string communityGalleryImageId = null) { throw null; } public static Azure.Compute.Batch.InboundEndpoint InboundEndpoint(string name = null, Azure.Compute.Batch.InboundEndpointProtocol protocol = default(Azure.Compute.Batch.InboundEndpointProtocol), string publicIpAddress = null, string publicFQDN = null, int frontendPort = 0, int backendPort = 0) { throw null; } public static Azure.Compute.Batch.InstanceViewStatus InstanceViewStatus(string code = null, string displayStatus = null, Azure.Compute.Batch.StatusLevelTypes? level = default(Azure.Compute.Batch.StatusLevelTypes?), string message = null, System.DateTimeOffset? time = default(System.DateTimeOffset?)) { throw null; } public static Azure.Compute.Batch.NameValuePair NameValuePair(string name = null, string value = null) { throw null; } @@ -2210,12 +2392,47 @@ public ContainerConfiguration(Azure.Compute.Batch.ContainerType type) { } public System.Collections.Generic.IList ContainerImageNames { get { throw null; } } public System.Collections.Generic.IList ContainerRegistries { get { throw null; } } public Azure.Compute.Batch.ContainerType Type { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ContainerConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ContainerConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public partial class ContainerHostBatchBindMountEntry : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContainerHostBatchBindMountEntry() { } + public bool? IsReadOnly { get { throw null; } set { } } + public Azure.Compute.Batch.ContainerHostDataPath? 
Source { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.Compute.Batch.ContainerHostBatchBindMountEntry System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.Compute.Batch.ContainerHostBatchBindMountEntry System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ContainerHostDataPath : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ContainerHostDataPath(string value) { throw null; } + public static Azure.Compute.Batch.ContainerHostDataPath Applications { get { throw null; } } + public static Azure.Compute.Batch.ContainerHostDataPath JobPrep { get { throw null; } } + public static Azure.Compute.Batch.ContainerHostDataPath Shared { get { throw null; } } + public static Azure.Compute.Batch.ContainerHostDataPath Startup { get { throw null; } } + public static Azure.Compute.Batch.ContainerHostDataPath Task { get { throw null; } } + public static Azure.Compute.Batch.ContainerHostDataPath VfsMounts { get { throw null; } } + public bool Equals(Azure.Compute.Batch.ContainerHostDataPath other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.Compute.Batch.ContainerHostDataPath left, Azure.Compute.Batch.ContainerHostDataPath right) { throw null; } + public static implicit operator Azure.Compute.Batch.ContainerHostDataPath (string value) { throw null; } + public static bool operator !=(Azure.Compute.Batch.ContainerHostDataPath left, Azure.Compute.Batch.ContainerHostDataPath right) { throw null; } + public override string ToString() { throw null; } + } public partial class ContainerRegistryReference : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public ContainerRegistryReference() { }
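A brief usage sketch for the ContainerHostBatchBindMountEntry and ContainerHostDataPath additions above, using only the members visible in this hunk; where these entries are ultimately consumed (for example, a bind-mount collection on the task's container settings) is not shown in this section and is assumed here:

    // Illustrative sketch only; not part of the generated API listing.
    using Azure.Compute.Batch;

    // Expose the task directory to the container as a read-only bind mount.
    var bindMount = new ContainerHostBatchBindMountEntry
    {
        Source = ContainerHostDataPath.Task,
        IsReadOnly = true,
    };

ContainerHostDataPath follows the package's extensible-enum pattern (string constructor, implicit conversion from string, equality operators), so values not listed above still round-trip.

@@ -2223,6 +2440,7 @@ public ContainerRegistryReference() { } public string Password { get { throw null; } set { } } public string RegistryServer { get { throw null; } set { } } public string Username { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ContainerRegistryReference System.ClientModel.Primitives.IJsonModel.Create(ref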
System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ContainerRegistryReference System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2272,6 +2490,7 @@ public DataDisk(int logicalUnitNumber, int diskSizeGb) { } public int DiskSizeGb { get { throw null; } set { } } public int LogicalUnitNumber { get { throw null; } set { } } public Azure.Compute.Batch.StorageAccountType? StorageAccountType { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.DataDisk System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.DataDisk System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2317,6 +2536,7 @@ public partial class DiffDiskSettings : System.ClientModel.Primitives.IJsonModel { public DiffDiskSettings() { } public Azure.Compute.Batch.DiffDiskPlacement? Placement { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.DiffDiskSettings System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.DiffDiskSettings System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2346,6 +2566,7 @@ public partial class DiskEncryptionConfiguration : System.ClientModel.Primitives { public DiskEncryptionConfiguration() { } public System.Collections.Generic.IList Targets { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.DiskEncryptionConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.DiskEncryptionConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2411,6 +2632,7 @@ public partial class EnvironmentSetting : System.ClientModel.Primitives.IJsonMod public EnvironmentSetting(string name) { } public string Name { get { throw null; } set { } } public string Value 
{ get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.EnvironmentSetting System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.EnvironmentSetting System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2440,6 +2662,7 @@ public partial class ExitCodeMapping : System.ClientModel.Primitives.IJsonModel< public ExitCodeMapping(int code, Azure.Compute.Batch.ExitOptions exitOptions) { } public int Code { get { throw null; } set { } } public Azure.Compute.Batch.ExitOptions ExitOptions { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ExitCodeMapping System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ExitCodeMapping System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2452,6 +2675,7 @@ public ExitCodeRangeMapping(int start, int end, Azure.Compute.Batch.ExitOptions public int End { get { throw null; } set { } } public Azure.Compute.Batch.ExitOptions ExitOptions { get { throw null; } set { } } public int Start { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ExitCodeRangeMapping System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ExitCodeRangeMapping System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2466,6 +2690,7 @@ public ExitConditions() { } public System.Collections.Generic.IList ExitCodes { get { throw null; } } public Azure.Compute.Batch.ExitOptions FileUploadError { get { throw null; } set { } } public Azure.Compute.Batch.ExitOptions PreProcessingError { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ExitConditions System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ExitConditions System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2477,6 +2702,7 @@ public partial class ExitOptions : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ExitOptions System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2486,11 +2712,12 @@ public ExitOptions() { } public partial class FileProperties : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal FileProperties() { } - public long ContentLength { get { throw null; } } + public string ContentLength { get { throw null; } } public string ContentType { get { throw null; } } public System.DateTimeOffset? CreationTime { get { throw null; } } public string FileMode { get { throw null; } } public System.DateTimeOffset LastModified { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.FileProperties System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.FileProperties System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2502,6 +2729,7 @@ public partial class HttpHeader : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.HttpHeader System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2511,12 +2739,15 @@ public HttpHeader(string name) { } public partial class ImageReference : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public ImageReference() { } + public string CommunityGalleryImageId { get { throw null; } set { } } public string ExactVersion { get { throw null; } } public string Offer { get { throw null; } set { } } public string Publisher { get { throw null; } set { } } + public string SharedGalleryImageId { get { throw null; } set { } } public string Sku { get { throw null; } set { } } public string Version { get { throw null; } set { } } public string VirtualMachineImageId { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ImageReference 
System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ImageReference System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
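A minimal sketch of the new gallery-image options on ImageReference (SharedGalleryImageId and CommunityGalleryImageId) shown in the hunk above; the resource ID below is a placeholder, not a real gallery path:

    // Illustrative sketch only; the gallery image ID is a placeholder.
    using Azure.Compute.Batch;

    var imageReference = new ImageReference
    {
        // Typically only one image source (publisher/offer/sku, virtualMachineImageId,
        // sharedGalleryImageId, or communityGalleryImageId) is set per reference.
        CommunityGalleryImageId = "<community-gallery-image-version-id>",
    };

ExactVersion remains get-only, as in the previous surface.

@@ -2550,6 +2781,7 @@ internal InboundEndpoint() { } public Azure.Compute.Batch.InboundEndpointProtocol Protocol { get { throw null; } } public string PublicFQDN { get { throw null; } } public string PublicIpAddress { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.InboundEndpoint System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.InboundEndpoint System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2583,6 +2815,7 @@ public InboundNatPool(string name, Azure.Compute.Batch.InboundEndpointProtocol p public string Name { get { throw null; } set { } } public System.Collections.Generic.IList NetworkSecurityGroupRules { get { throw null; } } public Azure.Compute.Batch.InboundEndpointProtocol Protocol { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.InboundNatPool System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.InboundNatPool System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2597,6 +2830,7 @@ internal InstanceViewStatus() { } public Azure.Compute.Batch.StatusLevelTypes? Level { get { throw null; } } public string Message { get { throw null; } } public System.DateTimeOffset?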
Time { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.InstanceViewStatus System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.InstanceViewStatus System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2628,6 +2862,7 @@ public LinuxUserConfiguration() { } public int? Gid { get { throw null; } set { } } public string SshPrivateKey { get { throw null; } set { } } public int? Uid { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.LinuxUserConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.LinuxUserConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2654,8 +2889,10 @@ public LinuxUserConfiguration() { } } public partial class ManagedDisk : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public ManagedDisk(Azure.Compute.Batch.StorageAccountType storageAccountType) { } - public Azure.Compute.Batch.StorageAccountType StorageAccountType { get { throw null; } set { } } + public ManagedDisk() { } + public Azure.Compute.Batch.VMDiskSecurityProfile SecurityProfile { get { throw null; } set { } } + public Azure.Compute.Batch.StorageAccountType? 
StorageAccountType { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ManagedDisk System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ManagedDisk System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
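The ManagedDisk changes above (parameterless constructor, nullable StorageAccountType, and a new SecurityProfile property) pair with the VMDiskSecurityProfile class and SecurityEncryptionTypes enum added further down in this file. A minimal sketch combining them with the existing OSDisk model, using only members that appear in this diff:

    // Illustrative sketch only; combines members introduced at different points in this diff.
    using Azure.Compute.Batch;

    var osDisk = new OSDisk
    {
        ManagedDisk = new ManagedDisk
        {
            // StorageAccountType is now optional and can be left unset.
            SecurityProfile = new VMDiskSecurityProfile
            {
                SecurityEncryptionType = SecurityEncryptionTypes.VMGuestStateOnly,
            },
        },
    };

A pool using this would presumably also set SecurityTypes.ConfidentialVM on its SecurityProfile, which this diff adds alongside TrustedLaunch.

@@ -2667,6 +2904,7 @@ public partial class MetadataItem : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.MetadataItem System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2680,6 +2918,7 @@ public MountConfiguration() { } public Azure.Compute.Batch.AzureFileShareConfiguration AzureFileShareConfiguration { get { throw null; } set { } } public Azure.Compute.Batch.CifsMountConfiguration CifsMountConfiguration { get { throw null; } set { } } public Azure.Compute.Batch.NfsMountConfiguration NfsMountConfiguration { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.MountConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.MountConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2692,6 +2931,7 @@ public MultiInstanceSettings(string coordinationCommandLine) { } public System.Collections.Generic.IList CommonResourceFiles { get { throw null; } } public string CoordinationCommandLine { get { throw null; } set { } } public int?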
NumberOfInstances { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.MultiInstanceSettings System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.MultiInstanceSettings System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2703,6 +2943,7 @@ public partial class NameValuePair : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.NameValuePair System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2717,6 +2958,7 @@ public NetworkConfiguration() { } public Azure.Compute.Batch.BatchPoolEndpointConfiguration EndpointConfiguration { get { throw null; } set { } } public Azure.Compute.Batch.PublicIpAddressConfiguration PublicIpAddressConfiguration { get { throw null; } set { } } public string SubnetId { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.NetworkConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.NetworkConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2730,6 +2972,7 @@ public NetworkSecurityGroupRule(int priority, Azure.Compute.Batch.NetworkSecurit public int Priority { get { throw null; } set { } } public string SourceAddressPrefix { get { throw null; } set { } } public System.Collections.Generic.IList SourcePortRanges { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.NetworkSecurityGroupRule System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.NetworkSecurityGroupRule System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2760,6 +3003,7 @@ public NfsMountConfiguration(string source, string relativeMountPath) { 
} public string MountOptions { get { throw null; } set { } } public string RelativeMountPath { get { throw null; } set { } } public string Source { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.NfsMountConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.NfsMountConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2810,6 +3054,7 @@ public OSDisk() { } public Azure.Compute.Batch.DiffDiskSettings EphemeralOSDiskSettings { get { throw null; } set { } } public Azure.Compute.Batch.ManagedDisk ManagedDisk { get { throw null; } set { } } public bool? WriteAcceleratorEnabled { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OSDisk System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OSDisk System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2840,6 +3085,7 @@ public OutputFile(string filePattern, Azure.Compute.Batch.OutputFileDestination public Azure.Compute.Batch.OutputFileDestination Destination { get { throw null; } set { } } public string FilePattern { get { throw null; } set { } } public Azure.Compute.Batch.OutputFileUploadConfig UploadOptions { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OutputFile System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OutputFile System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2853,6 +3099,7 @@ public OutputFileBlobContainerDestination(string containerUrl) { } public Azure.Compute.Batch.BatchNodeIdentityReference IdentityReference { get { throw null; } set { } } public string Path { get { throw null; } set { } } public System.Collections.Generic.IList UploadHeaders { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OutputFileBlobContainerDestination System.ClientModel.Primitives.IJsonModel.Create(ref 
System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OutputFileBlobContainerDestination System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2863,6 +3110,7 @@ public partial class OutputFileDestination : System.ClientModel.Primitives.IJson { public OutputFileDestination() { } public Azure.Compute.Batch.OutputFileBlobContainerDestination Container { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OutputFileDestination System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OutputFileDestination System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2892,6 +3140,7 @@ public partial class OutputFileUploadConfig : System.ClientModel.Primitives.IJso { public OutputFileUploadConfig(Azure.Compute.Batch.OutputFileUploadCondition uploadCondition) { } public Azure.Compute.Batch.OutputFileUploadCondition UploadCondition { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OutputFileUploadConfig System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.OutputFileUploadConfig System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2903,6 +3152,7 @@ public partial class PublicIpAddressConfiguration : System.ClientModel.Primitive public PublicIpAddressConfiguration() { } public System.Collections.Generic.IList IpAddressIds { get { throw null; } } public Azure.Compute.Batch.IpAddressProvisioningType? 
IpAddressProvisioningType { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.PublicIpAddressConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.PublicIpAddressConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2914,6 +3164,7 @@ public partial class RecentBatchJob : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.RecentBatchJob System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2926,6 +3177,7 @@ internal ResizeError() { } public string Code { get { throw null; } } public string Message { get { throw null; } } public System.Collections.Generic.IReadOnlyList Values { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ResizeError System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ResizeError System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2942,6 +3194,7 @@ public ResourceFile() { } public string HttpUrl { get { throw null; } set { } } public Azure.Compute.Batch.BatchNodeIdentityReference IdentityReference { get { throw null; } set { } } public string StorageContainerUrl { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ResourceFile System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ResourceFile System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2958,6 +3211,7 @@ public RollingUpgradePolicy() { } public System.TimeSpan? PauseTimeBetweenBatches { get { throw null; } set { } } public bool? PrioritizeUnhealthyInstances { get { throw null; } set { } } public bool? 
RollbackFailedInstancesOnPolicyBreach { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.RollingUpgradePolicy System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.RollingUpgradePolicy System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -2982,12 +3236,31 @@ public RollingUpgradePolicy() { } public static bool operator !=(Azure.Compute.Batch.SchedulingState left, Azure.Compute.Batch.SchedulingState right) { throw null; } public override string ToString() { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct SecurityEncryptionTypes : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public SecurityEncryptionTypes(string value) { throw null; } + public static Azure.Compute.Batch.SecurityEncryptionTypes NonPersistedTPM { get { throw null; } } + public static Azure.Compute.Batch.SecurityEncryptionTypes VMGuestStateOnly { get { throw null; } } + public bool Equals(Azure.Compute.Batch.SecurityEncryptionTypes other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.Compute.Batch.SecurityEncryptionTypes left, Azure.Compute.Batch.SecurityEncryptionTypes right) { throw null; } + public static implicit operator Azure.Compute.Batch.SecurityEncryptionTypes (string value) { throw null; } + public static bool operator !=(Azure.Compute.Batch.SecurityEncryptionTypes left, Azure.Compute.Batch.SecurityEncryptionTypes right) { throw null; } + public override string ToString() { throw null; } + } public partial class SecurityProfile : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public SecurityProfile(bool encryptionAtHost, Azure.Compute.Batch.SecurityTypes securityType, Azure.Compute.Batch.UefiSettings uefiSettings) { } public bool EncryptionAtHost { get { throw null; } set { } } public Azure.Compute.Batch.SecurityTypes SecurityType { get { throw null; } set { } } public Azure.Compute.Batch.UefiSettings UefiSettings { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.SecurityProfile System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.SecurityProfile 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3000,6 +3273,7 @@ public SecurityProfile(bool encryptionAtHost, Azure.Compute.Batch.SecurityTypes private readonly object _dummy; private readonly int _dummyPrimitive; public SecurityTypes(string value) { throw null; } + public static Azure.Compute.Batch.SecurityTypes ConfidentialVM { get { throw null; } } public static Azure.Compute.Batch.SecurityTypes TrustedLaunch { get { throw null; } } public bool Equals(Azure.Compute.Batch.SecurityTypes other) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] @@ -3015,6 +3289,7 @@ public partial class ServiceArtifactReference : System.ClientModel.Primitives.IJ { public ServiceArtifactReference(string id) { } public string Id { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ServiceArtifactReference System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.ServiceArtifactReference System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3064,6 +3339,7 @@ public partial class UefiSettings : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UefiSettings System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3095,6 +3371,7 @@ public UpgradePolicy(Azure.Compute.Batch.UpgradeMode mode) { } public Azure.Compute.Batch.AutomaticOsUpgradePolicy AutomaticOsUpgradePolicy { get { throw null; } set { } } public Azure.Compute.Batch.UpgradeMode Mode { get { throw null; } set { } } public Azure.Compute.Batch.RollingUpgradePolicy RollingUpgradePolicy { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UpgradePolicy System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UpgradePolicy System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3108,6 +3385,7 @@ public UploadBatchServiceLogsContent(string containerUrl, System.DateTimeOffset public System.DateTimeOffset? 
EndTime { get { throw null; } set { } } public Azure.Compute.Batch.BatchNodeIdentityReference IdentityReference { get { throw null; } set { } } public System.DateTimeOffset StartTime { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UploadBatchServiceLogsContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UploadBatchServiceLogsContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3119,6 +3397,7 @@ public partial class UploadBatchServiceLogsResult : System.ClientModel.Primitive internal UploadBatchServiceLogsResult() { } public int NumberOfFilesUploaded { get { throw null; } } public string VirtualDirectoryName { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UploadBatchServiceLogsResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UploadBatchServiceLogsResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3133,6 +3412,7 @@ public UserAccount(string name, string password) { } public string Name { get { throw null; } set { } } public string Password { get { throw null; } set { } } public Azure.Compute.Batch.WindowsUserConfiguration WindowsUserConfiguration { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UserAccount System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UserAccount System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3145,6 +3425,7 @@ internal UserAssignedIdentity() { } public string ClientId { get { throw null; } } public string PrincipalId { get { throw null; } } public string ResourceId { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UserAssignedIdentity System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void 
System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UserAssignedIdentity System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3156,6 +3437,7 @@ public partial class UserIdentity : System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.UserIdentity System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3177,6 +3459,7 @@ public VirtualMachineConfiguration(Azure.Compute.Batch.ImageReference imageRefer public Azure.Compute.Batch.SecurityProfile SecurityProfile { get { throw null; } set { } } public Azure.Compute.Batch.ServiceArtifactReference ServiceArtifactReference { get { throw null; } set { } } public Azure.Compute.Batch.WindowsConfiguration WindowsConfiguration { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.VirtualMachineConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.VirtualMachineConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3188,12 +3471,24 @@ public partial class VirtualMachineInfo : System.ClientModel.Primitives.IJsonMod internal VirtualMachineInfo() { } public Azure.Compute.Batch.ImageReference ImageReference { get { throw null; } } public string ScaleSetVmResourceId { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.VirtualMachineInfo System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.VirtualMachineInfo System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public partial class VMDiskSecurityProfile : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public 
VMDiskSecurityProfile() { } + public Azure.Compute.Batch.SecurityEncryptionTypes? SecurityEncryptionType { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.Compute.Batch.VMDiskSecurityProfile System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.Compute.Batch.VMDiskSecurityProfile System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } public partial class VMExtension : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public VMExtension(string name, string publisher, string type) { } @@ -3206,6 +3501,7 @@ public VMExtension(string name, string publisher, string type) { } public System.Collections.Generic.IDictionary Settings { get { throw null; } } public string Type { get { throw null; } set { } } public string TypeHandlerVersion { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.VMExtension System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.VMExtension System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3218,6 +3514,7 @@ internal VMExtensionInstanceView() { } public string Name { get { throw null; } } public System.Collections.Generic.IReadOnlyList Statuses { get { throw null; } } public System.Collections.Generic.IReadOnlyList SubStatuses { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.VMExtensionInstanceView System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.VMExtensionInstanceView System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3228,6 +3525,7 @@ public partial class WindowsConfiguration : System.ClientModel.Primitives.IJsonM { public WindowsConfiguration() { } public bool? 
EnableAutomaticUpdates { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.WindowsConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.WindowsConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -3238,6 +3536,7 @@ public partial class WindowsUserConfiguration : System.ClientModel.Primitives.IJ { public WindowsUserConfiguration() { } public Azure.Compute.Batch.LoginMode? LoginMode { get { throw null; } set { } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.WindowsUserConfiguration System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.Compute.Batch.WindowsUserConfiguration System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AffinityInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AffinityInfo.Serialization.cs index f2b549e739c0b..d49a42dae8add 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AffinityInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AffinityInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class AffinityInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti throw new FormatException($"The model {nameof(AffinityInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("affinityId"u8); writer.WriteStringValue(AffinityId); if (options.Format != "W" && _serializedAdditionalRawData != null) @@ -43,7 +51,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti #endif } } - writer.WriteEndObject(); } AffinityInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AuthenticationTokenSettings.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AuthenticationTokenSettings.Serialization.cs index 418b0c9f4ada9..1329c921ce655 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AuthenticationTokenSettings.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AuthenticationTokenSettings.Serialization.cs @@ -18,6 +18,15 @@ public partial class AuthenticationTokenSettings : IUtf8JsonSerializable, IJsonM void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR throw new FormatException($"The model {nameof(AuthenticationTokenSettings)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsCollectionDefined(Access)) { writer.WritePropertyName("access"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR #endif } } - writer.WriteEndObject(); } AuthenticationTokenSettings IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AutoScaleRun.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AutoScaleRun.Serialization.cs index 48bc6352dae69..bf93569283616 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AutoScaleRun.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AutoScaleRun.Serialization.cs @@ -18,6 +18,15 @@ public partial class AutoScaleRun : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti throw new FormatException($"The model {nameof(AutoScaleRun)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("timestamp"u8); writer.WriteStringValue(Timestamp, "O"); if (Optional.IsDefined(Results)) @@ -53,7 +61,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti #endif } } - writer.WriteEndObject(); } AutoScaleRun IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AutoScaleRunError.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AutoScaleRunError.Serialization.cs index d8cb62f788ff7..83707fb80af1e 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AutoScaleRunError.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AutoScaleRunError.Serialization.cs @@ -18,6 +18,15 @@ public partial class AutoScaleRunError : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrite throw new FormatException($"The model {nameof(AutoScaleRunError)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Code)) { writer.WritePropertyName("code"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrite #endif } } - writer.WriteEndObject(); } AutoScaleRunError IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AutoUserSpecification.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AutoUserSpecification.Serialization.cs index 17a5690247ce9..d85e1671ceade 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AutoUserSpecification.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AutoUserSpecification.Serialization.cs @@ -18,6 +18,15 @@ public partial class AutoUserSpecification : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(AutoUserSpecification)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Scope)) { writer.WritePropertyName("scope"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } AutoUserSpecification IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AutomaticOsUpgradePolicy.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AutomaticOsUpgradePolicy.Serialization.cs index 1c83efe2f9a70..e1e78e2d11509 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AutomaticOsUpgradePolicy.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AutomaticOsUpgradePolicy.Serialization.cs @@ -18,6 +18,15 @@ public partial class AutomaticOsUpgradePolicy : IUtf8JsonSerializable, IJsonMode void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead throw new FormatException($"The model {nameof(AutomaticOsUpgradePolicy)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(DisableAutomaticRollback)) { writer.WritePropertyName("disableAutomaticRollback"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead #endif } } - writer.WriteEndObject(); } AutomaticOsUpgradePolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AutomaticOsUpgradePolicy.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AutomaticOsUpgradePolicy.cs index a55f474ba013a..c3b9be08a2f0d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AutomaticOsUpgradePolicy.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AutomaticOsUpgradePolicy.cs @@ -52,7 +52,7 @@ public AutomaticOsUpgradePolicy() /// Initializes a new instance of . /// Whether OS image rollback feature should be disabled. - /// Indicates whether OS upgrades should automatically be applied to scale set instances in a rolling fashion when a newer version of the OS image becomes available. <br /><br /> If this is set to true for Windows based pools, [WindowsConfiguration.enableAutomaticUpdates](https://learn.microsoft.com/en-us/rest/api/batchservice/pool/add?tabs=HTTP#windowsconfiguration) cannot be set to true. + /// Indicates whether OS upgrades should automatically be applied to scale set instances in a rolling fashion when a newer version of the OS image becomes available. 
<br /><br /> If this is set to true for Windows based pools, [WindowsConfiguration.enableAutomaticUpdates](https://learn.microsoft.com/rest/api/batchservice/pool/add?tabs=HTTP#windowsconfiguration) cannot be set to true. /// Indicates whether rolling upgrade policy should be used during Auto OS Upgrade. Auto OS Upgrade will fallback to the default policy if no policy is defined on the VMSS. /// Defer OS upgrades on the TVMs if they are running tasks. /// Keeps track of any properties unknown to the library. @@ -67,7 +67,7 @@ internal AutomaticOsUpgradePolicy(bool? disableAutomaticRollback, bool? enableAu /// Whether OS image rollback feature should be disabled. public bool? DisableAutomaticRollback { get; set; } - /// Indicates whether OS upgrades should automatically be applied to scale set instances in a rolling fashion when a newer version of the OS image becomes available. <br /><br /> If this is set to true for Windows based pools, [WindowsConfiguration.enableAutomaticUpdates](https://learn.microsoft.com/en-us/rest/api/batchservice/pool/add?tabs=HTTP#windowsconfiguration) cannot be set to true. + /// Indicates whether OS upgrades should automatically be applied to scale set instances in a rolling fashion when a newer version of the OS image becomes available. <br /><br /> If this is set to true for Windows based pools, [WindowsConfiguration.enableAutomaticUpdates](https://learn.microsoft.com/rest/api/batchservice/pool/add?tabs=HTTP#windowsconfiguration) cannot be set to true. public bool? EnableAutomaticOsUpgrade { get; set; } /// Indicates whether rolling upgrade policy should be used during Auto OS Upgrade. Auto OS Upgrade will fallback to the default policy if no policy is defined on the VMSS. public bool? UseRollingUpgradePolicy { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AzureBlobFileSystemConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AzureBlobFileSystemConfiguration.Serialization.cs index d7170869ed0bb..1299ea54ebd14 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AzureBlobFileSystemConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AzureBlobFileSystemConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class AzureBlobFileSystemConfiguration : IUtf8JsonSerializable, I void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, M throw new FormatException($"The model {nameof(AzureBlobFileSystemConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("accountName"u8); writer.WriteStringValue(AccountName); writer.WritePropertyName("containerName"u8); @@ -67,7 +75,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, M #endif } } - writer.WriteEndObject(); } AzureBlobFileSystemConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/AzureFileShareConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/AzureFileShareConfiguration.Serialization.cs index 62c45e62098fb..b547ee2bcdf86 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/AzureFileShareConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/AzureFileShareConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class AzureFileShareConfiguration : IUtf8JsonSerializable, IJsonM void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR throw new FormatException($"The model {nameof(AzureFileShareConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("accountName"u8); writer.WriteStringValue(AccountName); writer.WritePropertyName("azureFileUrl"u8); @@ -54,7 +62,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR #endif } } - writer.WriteEndObject(); } AzureFileShareConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchApplication.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchApplication.Serialization.cs index ecc68700eb6c8..510483833a5dc 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchApplication.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchApplication.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchApplication : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter throw new FormatException($"The model {nameof(BatchApplication)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("id"u8); writer.WriteStringValue(Id); writer.WritePropertyName("displayName"u8); @@ -52,7 +60,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter #endif } } - writer.WriteEndObject(); } BatchApplication IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchApplicationPackageReference.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchApplicationPackageReference.Serialization.cs index 3aae3911ba245..36d0573961498 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchApplicationPackageReference.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchApplicationPackageReference.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchApplicationPackageReference : IUtf8JsonSerializable, I void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, M throw new FormatException($"The model {nameof(BatchApplicationPackageReference)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("applicationId"u8); writer.WriteStringValue(ApplicationId); if (Optional.IsDefined(Version)) @@ -48,7 +56,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, M #endif } } - writer.WriteEndObject(); } BatchApplicationPackageReference IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchAutoPoolSpecification.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchAutoPoolSpecification.Serialization.cs index cde73fe116664..9cebf8ebbd62d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchAutoPoolSpecification.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchAutoPoolSpecification.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchAutoPoolSpecification : IUtf8JsonSerializable, IJsonMo void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe throw new FormatException($"The model {nameof(BatchAutoPoolSpecification)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(AutoPoolIdPrefix)) { writer.WritePropertyName("autoPoolIdPrefix"u8); @@ -58,7 +66,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe #endif } } - writer.WriteEndObject(); } BatchAutoPoolSpecification IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchClient.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchClient.cs index 47672eb0c12fb..c77bfa624c581 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchClient.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchClient.cs @@ -1656,14 +1656,15 @@ public virtual Response RemoveNodes(string poolId, RequestContent content, int? /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will delete the Job even if the corresponding nodes have not fully processed the deletion. The default value is false. /// The content to send as the request conditions of the request. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. /// is an empty string, and was expected to be non-empty. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual async Task DeleteJobAsync(string jobId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestConditions requestConditions = null, RequestContext context = null) + /// + public virtual async Task DeleteJobAsync(string jobId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? force = null, RequestConditions requestConditions = null, RequestContext context = null) { Argument.AssertNotNullOrEmpty(jobId, nameof(jobId)); @@ -1671,7 +1672,7 @@ public virtual async Task DeleteJobAsync(string jobId, int? timeOutInS scope.Start(); try { - using HttpMessage message = CreateDeleteJobRequest(jobId, timeOutInSeconds, ocpdate, requestConditions, context); + using HttpMessage message = CreateDeleteJobRequest(jobId, timeOutInSeconds, ocpdate, force, requestConditions, context); return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); } catch (Exception e) @@ -1699,14 +1700,15 @@ public virtual async Task DeleteJobAsync(string jobId, int? timeOutInS /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will delete the Job even if the corresponding nodes have not fully processed the deletion. The default value is false. /// The content to send as the request conditions of the request. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. /// is an empty string, and was expected to be non-empty. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual Response DeleteJob(string jobId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestConditions requestConditions = null, RequestContext context = null) + /// + public virtual Response DeleteJob(string jobId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? 
force = null, RequestConditions requestConditions = null, RequestContext context = null) { Argument.AssertNotNullOrEmpty(jobId, nameof(jobId)); @@ -1714,7 +1716,7 @@ public virtual Response DeleteJob(string jobId, int? timeOutInSeconds = null, Da scope.Start(); try { - using HttpMessage message = CreateDeleteJobRequest(jobId, timeOutInSeconds, ocpdate, requestConditions, context); + using HttpMessage message = CreateDeleteJobRequest(jobId, timeOutInSeconds, ocpdate, force, requestConditions, context); return _pipeline.ProcessMessage(message, context); } catch (Exception e) @@ -2379,6 +2381,7 @@ public virtual Response EnableJob(string jobId, int? timeOutInSeconds = null, Da /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will terminate the Job even if the corresponding nodes have not fully processed the termination. The default value is false. /// The content to send as the request conditions of the request. /// The cancellation token to use. /// is null. @@ -2391,14 +2394,14 @@ public virtual Response EnableJob(string jobId, int? timeOutInSeconds = null, Da /// state, they will remain in the active state. Once a Job is terminated, new /// Tasks cannot be added and any remaining active Tasks will not be scheduled. /// - /// - public virtual async Task TerminateJobAsync(string jobId, BatchJobTerminateContent parameters = null, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestConditions requestConditions = null, CancellationToken cancellationToken = default) + /// + public virtual async Task TerminateJobAsync(string jobId, BatchJobTerminateContent parameters = null, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? force = null, RequestConditions requestConditions = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(jobId, nameof(jobId)); using RequestContent content = parameters?.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); - Response response = await TerminateJobAsync(jobId, content, timeOutInSeconds, ocpdate, requestConditions, context).ConfigureAwait(false); + Response response = await TerminateJobAsync(jobId, content, timeOutInSeconds, ocpdate, force, requestConditions, context).ConfigureAwait(false); return response; } @@ -2411,6 +2414,7 @@ public virtual async Task TerminateJobAsync(string jobId, BatchJobTerm /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will terminate the Job even if the corresponding nodes have not fully processed the termination. The default value is false. /// The content to send as the request conditions of the request. /// The cancellation token to use. /// is null. @@ -2423,14 +2427,14 @@ public virtual async Task TerminateJobAsync(string jobId, BatchJobTerm /// state, they will remain in the active state. Once a Job is terminated, new /// Tasks cannot be added and any remaining active Tasks will not be scheduled. /// - /// - public virtual Response TerminateJob(string jobId, BatchJobTerminateContent parameters = null, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestConditions requestConditions = null, CancellationToken cancellationToken = default) + /// + public virtual Response TerminateJob(string jobId, BatchJobTerminateContent parameters = null, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? 
force = null, RequestConditions requestConditions = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(jobId, nameof(jobId)); using RequestContent content = parameters?.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); - Response response = TerminateJob(jobId, content, timeOutInSeconds, ocpdate, requestConditions, context); + Response response = TerminateJob(jobId, content, timeOutInSeconds, ocpdate, force, requestConditions, context); return response; } @@ -2444,7 +2448,7 @@ public virtual Response TerminateJob(string jobId, BatchJobTerminateContent para /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -2457,14 +2461,15 @@ public virtual Response TerminateJob(string jobId, BatchJobTerminateContent para /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will terminate the Job even if the corresponding nodes have not fully processed the termination. The default value is false. /// The content to send as the request conditions of the request. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. /// is an empty string, and was expected to be non-empty. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual async Task TerminateJobAsync(string jobId, RequestContent content, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestConditions requestConditions = null, RequestContext context = null) + /// + public virtual async Task TerminateJobAsync(string jobId, RequestContent content, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? force = null, RequestConditions requestConditions = null, RequestContext context = null) { Argument.AssertNotNullOrEmpty(jobId, nameof(jobId)); @@ -2472,7 +2477,7 @@ public virtual async Task TerminateJobAsync(string jobId, RequestConte scope.Start(); try { - using HttpMessage message = CreateTerminateJobRequest(jobId, content, timeOutInSeconds, ocpdate, requestConditions, context); + using HttpMessage message = CreateTerminateJobRequest(jobId, content, timeOutInSeconds, ocpdate, force, requestConditions, context); return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); } catch (Exception e) @@ -2492,7 +2497,7 @@ public virtual async Task TerminateJobAsync(string jobId, RequestConte /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -2505,14 +2510,15 @@ public virtual async Task TerminateJobAsync(string jobId, RequestConte /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will terminate the Job even if the corresponding nodes have not fully processed the termination. The default value is false. /// The content to send as the request conditions of the request. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. /// is an empty string, and was expected to be non-empty. /// Service returned a non-success status code. /// The response returned from the service. 
- /// - public virtual Response TerminateJob(string jobId, RequestContent content, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestConditions requestConditions = null, RequestContext context = null) + /// + public virtual Response TerminateJob(string jobId, RequestContent content, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? force = null, RequestConditions requestConditions = null, RequestContext context = null) { Argument.AssertNotNullOrEmpty(jobId, nameof(jobId)); @@ -2520,7 +2526,7 @@ public virtual Response TerminateJob(string jobId, RequestContent content, int? scope.Start(); try { - using HttpMessage message = CreateTerminateJobRequest(jobId, content, timeOutInSeconds, ocpdate, requestConditions, context); + using HttpMessage message = CreateTerminateJobRequest(jobId, content, timeOutInSeconds, ocpdate, force, requestConditions, context); return _pipeline.ProcessMessage(message, context); } catch (Exception e) @@ -2848,14 +2854,15 @@ public virtual Response GetJobTaskCounts(string jobId, int? timeOutInSeconds, Da /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will delete the JobSchedule even if the corresponding nodes have not fully processed the deletion. The default value is false. /// The content to send as the request conditions of the request. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. /// is an empty string, and was expected to be non-empty. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual async Task DeleteJobScheduleAsync(string jobScheduleId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestConditions requestConditions = null, RequestContext context = null) + /// + public virtual async Task DeleteJobScheduleAsync(string jobScheduleId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? force = null, RequestConditions requestConditions = null, RequestContext context = null) { Argument.AssertNotNullOrEmpty(jobScheduleId, nameof(jobScheduleId)); @@ -2863,7 +2870,7 @@ public virtual async Task DeleteJobScheduleAsync(string jobScheduleId, scope.Start(); try { - using HttpMessage message = CreateDeleteJobScheduleRequest(jobScheduleId, timeOutInSeconds, ocpdate, requestConditions, context); + using HttpMessage message = CreateDeleteJobScheduleRequest(jobScheduleId, timeOutInSeconds, ocpdate, force, requestConditions, context); return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); } catch (Exception e) @@ -2891,14 +2898,15 @@ public virtual async Task DeleteJobScheduleAsync(string jobScheduleId, /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will delete the JobSchedule even if the corresponding nodes have not fully processed the deletion. The default value is false. /// The content to send as the request conditions of the request. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. /// is an empty string, and was expected to be non-empty. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual Response DeleteJobSchedule(string jobScheduleId, int? timeOutInSeconds = null, DateTimeOffset? 
ocpdate = null, RequestConditions requestConditions = null, RequestContext context = null) + /// + public virtual Response DeleteJobSchedule(string jobScheduleId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? force = null, RequestConditions requestConditions = null, RequestContext context = null) { Argument.AssertNotNullOrEmpty(jobScheduleId, nameof(jobScheduleId)); @@ -2906,7 +2914,7 @@ public virtual Response DeleteJobSchedule(string jobScheduleId, int? timeOutInSe scope.Start(); try { - using HttpMessage message = CreateDeleteJobScheduleRequest(jobScheduleId, timeOutInSeconds, ocpdate, requestConditions, context); + using HttpMessage message = CreateDeleteJobScheduleRequest(jobScheduleId, timeOutInSeconds, ocpdate, force, requestConditions, context); return _pipeline.ProcessMessage(message, context); } catch (Exception e) @@ -3502,14 +3510,15 @@ public virtual Response EnableJobSchedule(string jobScheduleId, int? timeOutInSe /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will terminate the JobSchedule even if the corresponding nodes have not fully processed the termination. The default value is false. /// The content to send as the request conditions of the request. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. /// is an empty string, and was expected to be non-empty. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual async Task TerminateJobScheduleAsync(string jobScheduleId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestConditions requestConditions = null, RequestContext context = null) + /// + public virtual async Task TerminateJobScheduleAsync(string jobScheduleId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? force = null, RequestConditions requestConditions = null, RequestContext context = null) { Argument.AssertNotNullOrEmpty(jobScheduleId, nameof(jobScheduleId)); @@ -3517,7 +3526,7 @@ public virtual async Task TerminateJobScheduleAsync(string jobSchedule scope.Start(); try { - using HttpMessage message = CreateTerminateJobScheduleRequest(jobScheduleId, timeOutInSeconds, ocpdate, requestConditions, context); + using HttpMessage message = CreateTerminateJobScheduleRequest(jobScheduleId, timeOutInSeconds, ocpdate, force, requestConditions, context); return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); } catch (Exception e) @@ -3545,14 +3554,15 @@ public virtual async Task TerminateJobScheduleAsync(string jobSchedule /// current system clock time; set it explicitly if you are calling the REST API /// directly. /// + /// If true, the server will terminate the JobSchedule even if the corresponding nodes have not fully processed the termination. The default value is false. /// The content to send as the request conditions of the request. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// is null. /// is an empty string, and was expected to be non-empty. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual Response TerminateJobSchedule(string jobScheduleId, int? timeOutInSeconds = null, DateTimeOffset? 
ocpdate = null, RequestConditions requestConditions = null, RequestContext context = null) + /// + public virtual Response TerminateJobSchedule(string jobScheduleId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, bool? force = null, RequestConditions requestConditions = null, RequestContext context = null) { Argument.AssertNotNullOrEmpty(jobScheduleId, nameof(jobScheduleId)); @@ -3560,7 +3570,7 @@ public virtual Response TerminateJobSchedule(string jobScheduleId, int? timeOutI scope.Start(); try { - using HttpMessage message = CreateTerminateJobScheduleRequest(jobScheduleId, timeOutInSeconds, ocpdate, requestConditions, context); + using HttpMessage message = CreateTerminateJobScheduleRequest(jobScheduleId, timeOutInSeconds, ocpdate, force, requestConditions, context); return _pipeline.ProcessMessage(message, context); } catch (Exception e) @@ -5746,6 +5756,402 @@ public virtual Response RebootNode(string poolId, string nodeId, RequestContent } } + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Starts the specified Compute Node. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task StartNodeAsync(string poolId, string nodeId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using var scope = ClientDiagnostics.CreateScope("BatchClient.StartNode"); + scope.Start(); + try + { + using HttpMessage message = CreateStartNodeRequest(poolId, nodeId, timeOutInSeconds, ocpdate, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Starts the specified Compute Node. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. 
Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response StartNode(string poolId, string nodeId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using var scope = ClientDiagnostics.CreateScope("BatchClient.StartNode"); + scope.Start(); + try + { + using HttpMessage message = CreateStartNodeRequest(poolId, nodeId, timeOutInSeconds, ocpdate, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Reinstalls the operating system on the specified Compute Node. + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The options to use for reimaging the Compute Node. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The cancellation token to use. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// + /// You can reinstall the operating system on a Compute Node only if it is in an + /// idle or running state. This API can be invoked only on Pools created with the + /// cloud service configuration property. + /// + /// + public virtual async Task ReimageNodeAsync(string poolId, string nodeId, BatchNodeReimageContent parameters = null, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using RequestContent content = parameters?.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await ReimageNodeAsync(poolId, nodeId, content, timeOutInSeconds, ocpdate, context).ConfigureAwait(false); + return response; + } + + /// Reinstalls the operating system on the specified Compute Node. + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The options to use for reimaging the Compute Node. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The cancellation token to use. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// + /// You can reinstall the operating system on a Compute Node only if it is in an + /// idle or running state. 
This API can be invoked only on Pools created with the + /// cloud service configuration property. + /// + /// + public virtual Response ReimageNode(string poolId, string nodeId, BatchNodeReimageContent parameters = null, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using RequestContent content = parameters?.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = ReimageNode(poolId, nodeId, content, timeOutInSeconds, ocpdate, context); + return response; + } + + /// + /// [Protocol Method] Reinstalls the operating system on the specified Compute Node. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The content to send as the body of the request. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task ReimageNodeAsync(string poolId, string nodeId, RequestContent content, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using var scope = ClientDiagnostics.CreateScope("BatchClient.ReimageNode"); + scope.Start(); + try + { + using HttpMessage message = CreateReimageNodeRequest(poolId, nodeId, content, timeOutInSeconds, ocpdate, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Reinstalls the operating system on the specified Compute Node. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The content to send as the body of the request. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. 
+ /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response ReimageNode(string poolId, string nodeId, RequestContent content, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using var scope = ClientDiagnostics.CreateScope("BatchClient.ReimageNode"); + scope.Start(); + try + { + using HttpMessage message = CreateReimageNodeRequest(poolId, nodeId, content, timeOutInSeconds, ocpdate, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Deallocates the specified Compute Node. + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The options to use for deallocating the Compute Node. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The cancellation token to use. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// You can deallocate a Compute Node only if it is in an idle or running state. + /// + public virtual async Task DeallocateNodeAsync(string poolId, string nodeId, BatchNodeDeallocateContent parameters = null, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using RequestContent content = parameters?.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await DeallocateNodeAsync(poolId, nodeId, content, timeOutInSeconds, ocpdate, context).ConfigureAwait(false); + return response; + } + + /// Deallocates the specified Compute Node. + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The options to use for deallocating the Compute Node. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The cancellation token to use. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// You can deallocate a Compute Node only if it is in an idle or running state. + /// + public virtual Response DeallocateNode(string poolId, string nodeId, BatchNodeDeallocateContent parameters = null, int? timeOutInSeconds = null, DateTimeOffset? 
ocpdate = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using RequestContent content = parameters?.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = DeallocateNode(poolId, nodeId, content, timeOutInSeconds, ocpdate, context); + return response; + } + + /// + /// [Protocol Method] Deallocates the specified Compute Node. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The content to send as the body of the request. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeallocateNodeAsync(string poolId, string nodeId, RequestContent content, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using var scope = ClientDiagnostics.CreateScope("BatchClient.DeallocateNode"); + scope.Start(); + try + { + using HttpMessage message = CreateDeallocateNodeRequest(poolId, nodeId, content, timeOutInSeconds, ocpdate, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Deallocates the specified Compute Node. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The ID of the Pool that contains the Compute Node. + /// The ID of the Compute Node that you want to restart. + /// The content to send as the body of the request. + /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead.". + /// + /// The time the request was issued. Client libraries typically set this to the + /// current system clock time; set it explicitly if you are calling the REST API + /// directly. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
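Usage sketch (editorial, not part of the generated source): the protocol deallocate overload documented above accepts a raw RequestContent body. The JSON property name nodeDeallocateOption and the value requeue are assumptions modeled on the other node lifecycle operations, not facts taken from this change; the convenience overload that accepts BatchNodeDeallocateContent avoids hand-building the payload. The client variable is the one from the earlier sketch.
using RequestContent deallocateBody = RequestContent.Create(new { nodeDeallocateOption = "requeue" }); // property name and value are assumptions
Response deallocateResponse = await client.DeallocateNodeAsync("pool-1", "node-1", deallocateBody);
Response startResponse = client.StartNode("pool-1", "node-1"); // synchronous protocol overload added above; brings the node back after deallocation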
+ /// + public virtual Response DeallocateNode(string poolId, string nodeId, RequestContent content, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(poolId, nameof(poolId)); + Argument.AssertNotNullOrEmpty(nodeId, nameof(nodeId)); + + using var scope = ClientDiagnostics.CreateScope("BatchClient.DeallocateNode"); + scope.Start(); + try + { + using HttpMessage message = CreateDeallocateNodeRequest(poolId, nodeId, content, timeOutInSeconds, ocpdate, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + /// Disables Task scheduling on the specified Compute Node. /// The ID of the Pool that contains the Compute Node. /// The ID of the Compute Node on which you want to disable Task scheduling. @@ -6003,9 +6409,8 @@ public virtual Response EnableNodeScheduling(string poolId, string nodeId, int? /// or is null. /// or is an empty string, and was expected to be non-empty. /// - /// Before you can remotely login to a Compute Node using the remote login - /// settings, you must create a user Account on the Compute Node. This API can be - /// invoked only on Pools created with the virtual machine configuration property. + /// Before you can remotely login to a Compute Node using the remote login settings, + /// you must create a user Account on the Compute Node. /// /// public virtual async Task> GetNodeRemoteLoginSettingsAsync(string poolId, string nodeId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, CancellationToken cancellationToken = default) @@ -6031,9 +6436,8 @@ public virtual async Task> GetNodeRemoteL /// or is null. /// or is an empty string, and was expected to be non-empty. /// - /// Before you can remotely login to a Compute Node using the remote login - /// settings, you must create a user Account on the Compute Node. This API can be - /// invoked only on Pools created with the virtual machine configuration property. + /// Before you can remotely login to a Compute Node using the remote login settings, + /// you must create a user Account on the Compute Node. /// /// public virtual Response GetNodeRemoteLoginSettings(string poolId, string nodeId, int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, CancellationToken cancellationToken = default) @@ -7024,7 +7428,7 @@ public virtual Pageable GetApplications(int? timeOutInSeconds, DateT /// The maximum number of items to return in the response. A maximum of 1000 /// applications can be returned. /// - /// + /// /// The earliest time from which to include metrics. This must be at least two and /// a half hours before the current time. If not specified this defaults to the /// start time of the last aggregation interval currently available. @@ -7036,7 +7440,7 @@ public virtual Pageable GetApplications(int? timeOutInSeconds, DateT /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics. /// /// The cancellation token to use. /// @@ -7048,11 +7452,11 @@ public virtual Pageable GetApplications(int? timeOutInSeconds, DateT /// last aggregation interval is returned. /// /// - public virtual AsyncPageable GetPoolUsageMetricsAsync(int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, int? 
maxresults = null, DateTimeOffset? starttime = null, DateTimeOffset? endtime = null, string filter = null, CancellationToken cancellationToken = default) + public virtual AsyncPageable GetPoolUsageMetricsAsync(int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, int? maxresults = null, DateTimeOffset? startTime = null, DateTimeOffset? endtime = null, string filter = null, CancellationToken cancellationToken = default) { RequestContext context = cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null; - HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetPoolUsageMetricsRequest(timeOutInSeconds, ocpdate, maxresults, starttime, endtime, filter, context); - HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => CreateGetPoolUsageMetricsNextPageRequest(nextLink, timeOutInSeconds, ocpdate, maxresults, starttime, endtime, filter, context); + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetPoolUsageMetricsRequest(timeOutInSeconds, ocpdate, maxresults, startTime, endtime, filter, context); + HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => CreateGetPoolUsageMetricsNextPageRequest(nextLink, timeOutInSeconds, ocpdate, maxresults, startTime, endtime, filter, context); return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, NextPageRequest, e => BatchPoolUsageMetrics.DeserializeBatchPoolUsageMetrics(e), ClientDiagnostics, _pipeline, "BatchClient.GetPoolUsageMetrics", "value", "odata.nextLink", context); } @@ -7070,7 +7474,7 @@ public virtual AsyncPageable GetPoolUsageMetricsAsync(int /// The maximum number of items to return in the response. A maximum of 1000 /// applications can be returned. /// - /// + /// /// The earliest time from which to include metrics. This must be at least two and /// a half hours before the current time. If not specified this defaults to the /// start time of the last aggregation interval currently available. @@ -7082,7 +7486,7 @@ public virtual AsyncPageable GetPoolUsageMetricsAsync(int /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics. /// /// The cancellation token to use. /// @@ -7094,11 +7498,11 @@ public virtual AsyncPageable GetPoolUsageMetricsAsync(int /// last aggregation interval is returned. /// /// - public virtual Pageable GetPoolUsageMetrics(int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, int? maxresults = null, DateTimeOffset? starttime = null, DateTimeOffset? endtime = null, string filter = null, CancellationToken cancellationToken = default) + public virtual Pageable GetPoolUsageMetrics(int? timeOutInSeconds = null, DateTimeOffset? ocpdate = null, int? maxresults = null, DateTimeOffset? startTime = null, DateTimeOffset? endtime = null, string filter = null, CancellationToken cancellationToken = default) { RequestContext context = cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null; - HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetPoolUsageMetricsRequest(timeOutInSeconds, ocpdate, maxresults, starttime, endtime, filter, context); - HttpMessage NextPageRequest(int? 
pageSizeHint, string nextLink) => CreateGetPoolUsageMetricsNextPageRequest(nextLink, timeOutInSeconds, ocpdate, maxresults, starttime, endtime, filter, context); + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetPoolUsageMetricsRequest(timeOutInSeconds, ocpdate, maxresults, startTime, endtime, filter, context); + HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => CreateGetPoolUsageMetricsNextPageRequest(nextLink, timeOutInSeconds, ocpdate, maxresults, startTime, endtime, filter, context); return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, NextPageRequest, e => BatchPoolUsageMetrics.DeserializeBatchPoolUsageMetrics(e), ClientDiagnostics, _pipeline, "BatchClient.GetPoolUsageMetrics", "value", "odata.nextLink", context); } @@ -7128,7 +7532,7 @@ public virtual Pageable GetPoolUsageMetrics(int? timeOutI /// The maximum number of items to return in the response. A maximum of 1000 /// applications can be returned. /// - /// + /// /// The earliest time from which to include metrics. This must be at least two and /// a half hours before the current time. If not specified this defaults to the /// start time of the last aggregation interval currently available. @@ -7140,16 +7544,16 @@ public virtual Pageable GetPoolUsageMetrics(int? timeOutI /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics. /// /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// Service returned a non-success status code. /// The from the service containing a list of objects. Details of the body schema for each item in the collection are in the Remarks section below. /// - public virtual AsyncPageable GetPoolUsageMetricsAsync(int? timeOutInSeconds, DateTimeOffset? ocpdate, int? maxresults, DateTimeOffset? starttime, DateTimeOffset? endtime, string filter, RequestContext context) + public virtual AsyncPageable GetPoolUsageMetricsAsync(int? timeOutInSeconds, DateTimeOffset? ocpdate, int? maxresults, DateTimeOffset? startTime, DateTimeOffset? endtime, string filter, RequestContext context) { - HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetPoolUsageMetricsRequest(timeOutInSeconds, ocpdate, maxresults, starttime, endtime, filter, context); - HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => CreateGetPoolUsageMetricsNextPageRequest(nextLink, timeOutInSeconds, ocpdate, maxresults, starttime, endtime, filter, context); + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetPoolUsageMetricsRequest(timeOutInSeconds, ocpdate, maxresults, startTime, endtime, filter, context); + HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => CreateGetPoolUsageMetricsNextPageRequest(nextLink, timeOutInSeconds, ocpdate, maxresults, startTime, endtime, filter, context); return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, NextPageRequest, e => BinaryData.FromString(e.GetRawText()), ClientDiagnostics, _pipeline, "BatchClient.GetPoolUsageMetrics", "value", "odata.nextLink", context); } @@ -7179,7 +7583,7 @@ public virtual AsyncPageable GetPoolUsageMetricsAsync(int? timeOutIn /// The maximum number of items to return in the response. A maximum of 1000 /// applications can be returned. 
/// - /// /// The earliest time from which to include metrics. This must be at least two and /// a half hours before the current time. If not specified this defaults to the /// start time of the last aggregation interval currently available. @@ -7191,20 +7595,20 @@ public virtual AsyncPageable GetPoolUsageMetricsAsync(int? timeOutIn /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics. /// /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// Service returned a non-success status code. /// The from the service containing a list of objects. Details of the body schema for each item in the collection are in the Remarks section below. /// - public virtual Pageable GetPoolUsageMetrics(int? timeOutInSeconds, DateTimeOffset? ocpdate, int? maxresults, DateTimeOffset? starttime, DateTimeOffset? endtime, string filter, RequestContext context) + public virtual Pageable GetPoolUsageMetrics(int? timeOutInSeconds, DateTimeOffset? ocpdate, int? maxresults, DateTimeOffset? startTime, DateTimeOffset? endtime, string filter, RequestContext context) { - HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetPoolUsageMetricsRequest(timeOutInSeconds, ocpdate, maxresults, starttime, endtime, filter, context); - HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => CreateGetPoolUsageMetricsNextPageRequest(nextLink, timeOutInSeconds, ocpdate, maxresults, starttime, endtime, filter, context); + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetPoolUsageMetricsRequest(timeOutInSeconds, ocpdate, maxresults, startTime, endtime, filter, context); + HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => CreateGetPoolUsageMetricsNextPageRequest(nextLink, timeOutInSeconds, ocpdate, maxresults, startTime, endtime, filter, context); return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, NextPageRequest, e => BinaryData.FromString(e.GetRawText()), ClientDiagnostics, _pipeline, "BatchClient.GetPoolUsageMetrics", "value", "odata.nextLink", context); } /// Lists all of the Pools in the specified Account. /// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead. /// /// The time the request was issued. Client libraries typically set this to the @@ -7217,7 +7621,7 @@ public virtual Pageable GetPoolUsageMetrics(int? timeOutInSeconds, D /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-pools. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-pools. /// /// An OData $select clause. /// An OData $expand clause. @@ -7231,7 +7635,7 @@ public virtual AsyncPageable GetPoolsAsync(int? timeOutInSeconds = nu return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, NextPageRequest, e => BatchPool.DeserializeBatchPool(e), ClientDiagnostics, _pipeline, "BatchClient.GetPools", "value", "odata.nextLink", context); } /// Lists all of the Pools in the specified Account.
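Usage sketch (editorial, not part of the generated source): with the query parameter surfaced as startTime instead of starttime, pool usage metrics can be enumerated as below. The eight-hour window is arbitrary and the BatchPoolUsageMetrics property names are assumptions rather than facts from this change.
await foreach (BatchPoolUsageMetrics metrics in client.GetPoolUsageMetricsAsync(startTime: DateTimeOffset.UtcNow.AddHours(-8), endtime: DateTimeOffset.UtcNow))
{
    Console.WriteLine($"{metrics.PoolId}: {metrics.TotalCoreHours} core-hours"); // property names assumed for illustration
}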
/// The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. If the value is larger than 30, the default will be used instead. /// /// The time the request was issued. Client libraries typically set this to the @@ -7244,7 +7648,7 @@ public virtual AsyncPageable GetPoolsAsync(int? timeOutInSeconds = nu /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-pools. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-pools. /// /// An OData $select clause. /// An OData $expand clause. @@ -7259,7 +7663,7 @@ public virtual Pageable GetPools(int? timeOutInSeconds = null, DateTi } /// /// [Protocol Method] Lists all of the Pools in the specified Account. /// /// /// @@ -7285,7 +7689,7 @@ public virtual Pageable GetPools(int? timeOutInSeconds = null, DateTi /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-pools. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-pools. /// /// An OData $select clause. /// An OData $expand clause. @@ -7301,7 +7705,7 @@ public virtual AsyncPageable GetPoolsAsync(int? timeOutInSeconds, Da } /// /// [Protocol Method] Lists all of the Pools in the specified Account. /// /// /// @@ -7327,7 +7731,7 @@ public virtual AsyncPageable GetPoolsAsync(int? timeOutInSeconds, Da /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-pools. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-pools. /// /// An OData $select clause. /// An OData $expand clause. @@ -7355,7 +7759,7 @@ public virtual Pageable GetPools(int? timeOutInSeconds, DateTimeOffs /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-support-images. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images. /// /// The cancellation token to use. /// @@ -7380,7 +7784,7 @@ public virtual AsyncPageable GetSupportedImagesAsync(int? t /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-support-images. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images. /// /// The cancellation token to use. /// @@ -7419,7 +7823,7 @@ public virtual Pageable GetSupportedImages(int? timeOutInSe /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-support-images. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images. /// /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// Service returned a non-success status code. @@ -7459,7 +7863,7 @@ public virtual AsyncPageable GetSupportedImagesAsync(int?
timeOutInS /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-support-images. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images. /// /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// Service returned a non-success status code. @@ -7489,7 +7893,7 @@ public virtual Pageable GetSupportedImages(int? timeOutInSeconds, Da /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-support-images. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images. /// /// The cancellation token to use. /// @@ -7518,7 +7922,7 @@ public virtual AsyncPageable GetPoolNodeCountsAsync(int? ti /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-support-images. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images. /// /// The cancellation token to use. /// @@ -7559,7 +7963,7 @@ public virtual Pageable GetPoolNodeCounts(int? timeOutInSec /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-support-images. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images. /// /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// Service returned a non-success status code. @@ -7601,7 +8005,7 @@ public virtual AsyncPageable GetPoolNodeCountsAsync(int? timeOutInSe /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-support-images. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images. /// /// The request context, which can override default behaviors of the client pipeline on a per-call basis. /// Service returned a non-success status code. @@ -7627,7 +8031,7 @@ public virtual Pageable GetPoolNodeCounts(int? timeOutInSeconds, Dat /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-jobs. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs. /// /// An OData $select clause. /// An OData $expand clause. @@ -7654,7 +8058,7 @@ public virtual AsyncPageable GetJobsAsync(int? timeOutInSeconds = null /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-jobs. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs. /// /// An OData $select clause. /// An OData $expand clause. @@ -7695,7 +8099,7 @@ public virtual Pageable GetJobs(int? timeOutInSeconds = null, DateTime /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-jobs. 
+ /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs. /// /// An OData $select clause. /// An OData $expand clause. @@ -7737,7 +8141,7 @@ public virtual AsyncPageable GetJobsAsync(int? timeOutInSeconds, Dat /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-jobs. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs. /// /// An OData $select clause. /// An OData $expand clause. @@ -7766,7 +8170,7 @@ public virtual Pageable GetJobs(int? timeOutInSeconds, DateTimeOffse /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule. /// /// An OData $select clause. /// An OData $expand clause. @@ -7798,7 +8202,7 @@ public virtual AsyncPageable GetJobsFromSchedulesAsync(string jobSched /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule. /// /// An OData $select clause. /// An OData $expand clause. @@ -7844,7 +8248,7 @@ public virtual Pageable GetJobsFromSchedules(string jobScheduleId, int /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule. /// /// An OData $select clause. /// An OData $expand clause. @@ -7891,7 +8295,7 @@ public virtual AsyncPageable GetJobsFromSchedulesAsync(string jobSch /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule. /// /// An OData $select clause. /// An OData $expand clause. @@ -7927,7 +8331,7 @@ public virtual Pageable GetJobsFromSchedules(string jobScheduleId, i /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status. /// /// An OData $select clause. /// The cancellation token to use. @@ -7969,7 +8373,7 @@ public virtual AsyncPageable GetJobPrep /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status. /// /// An OData $select clause. /// The cancellation token to use. @@ -8023,7 +8427,7 @@ public virtual Pageable GetJobPreparati /// /// /// An OData $filter clause. 
For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status. /// /// An OData $select clause. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. @@ -8070,7 +8474,7 @@ public virtual AsyncPageable GetJobPreparationAndReleaseTaskStatuses /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status. /// /// An OData $select clause. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. @@ -8101,7 +8505,7 @@ public virtual Pageable GetJobPreparationAndReleaseTaskStatuses(stri /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-job-schedules. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules. /// /// An OData $select clause. /// An OData $expand clause. @@ -8128,7 +8532,7 @@ public virtual AsyncPageable GetJobSchedulesAsync(int? timeOut /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-job-schedules. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules. /// /// An OData $select clause. /// An OData $expand clause. @@ -8169,7 +8573,7 @@ public virtual Pageable GetJobSchedules(int? timeOutInSeconds /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-job-schedules. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules. /// /// An OData $select clause. /// An OData $expand clause. @@ -8211,7 +8615,7 @@ public virtual AsyncPageable GetJobSchedulesAsync(int? timeOutInSeco /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-job-schedules. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules. /// /// An OData $select clause. /// An OData $expand clause. @@ -8240,7 +8644,7 @@ public virtual Pageable GetJobSchedules(int? timeOutInSeconds, DateT /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-tasks. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks. /// /// An OData $select clause. /// An OData $expand clause. @@ -8277,7 +8681,7 @@ public virtual AsyncPageable GetTasksAsync(string jobId, int? timeOut /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-tasks. 
+ /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks. /// /// An OData $select clause. /// An OData $expand clause. @@ -8328,7 +8732,7 @@ public virtual Pageable GetTasks(string jobId, int? timeOutInSeconds /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-tasks. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks. /// /// An OData $select clause. /// An OData $expand clause. @@ -8375,7 +8779,7 @@ public virtual AsyncPageable GetTasksAsync(string jobId, int? timeOu /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-tasks. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks. /// /// An OData $select clause. /// An OData $expand clause. @@ -8549,7 +8953,7 @@ public virtual Pageable GetSubTasks(string jobId, string taskId, int /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-task-files. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-task-files. /// /// /// Whether to list children of the Task directory. This parameter can be used in @@ -8585,7 +8989,7 @@ public virtual AsyncPageable GetTaskFilesAsync(string jobId, stri /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-task-files. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-task-files. /// /// /// Whether to list children of the Task directory. This parameter can be used in @@ -8635,7 +9039,7 @@ public virtual Pageable GetTaskFiles(string jobId, string taskId, /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-task-files. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-task-files. /// /// /// Whether to list children of the Task directory. This parameter can be used in @@ -8686,7 +9090,7 @@ public virtual AsyncPageable GetTaskFilesAsync(string jobId, string /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-task-files. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-task-files. /// /// /// Whether to list children of the Task directory. This parameter can be used in @@ -8722,7 +9126,7 @@ public virtual Pageable GetTaskFiles(string jobId, string taskId, in /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool. /// /// An OData $select clause. /// The cancellation token to use. @@ -8753,7 +9157,7 @@ public virtual AsyncPageable GetNodesAsync(string poolId, int? timeOu /// /// /// An OData $filter clause. 
For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool. /// /// An OData $select clause. /// The cancellation token to use. @@ -8798,7 +9202,7 @@ public virtual Pageable GetNodes(string poolId, int? timeOutInSeconds /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool. /// /// An OData $select clause. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. @@ -8844,7 +9248,7 @@ public virtual AsyncPageable GetNodesAsync(string poolId, int? timeO /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool. /// /// An OData $select clause. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. @@ -9023,7 +9427,7 @@ public virtual Pageable GetNodeExtensions(string poolId, string node /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files. /// /// Whether to list children of a directory. /// The cancellation token to use. @@ -9056,7 +9460,7 @@ public virtual AsyncPageable GetNodeFilesAsync(string poolId, str /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files. /// /// Whether to list children of a directory. /// The cancellation token to use. @@ -9103,7 +9507,7 @@ public virtual Pageable GetNodeFiles(string poolId, string nodeId /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files. /// /// Whether to list children of a directory. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. @@ -9151,7 +9555,7 @@ public virtual AsyncPageable GetNodeFilesAsync(string poolId, string /// /// /// An OData $filter clause. For more information on constructing this filter, see - /// https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files. + /// https://docs.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files. /// /// Whether to list children of a directory. /// The request context, which can override default behaviors of the client pipeline on a per-call basis. @@ -9223,7 +9627,7 @@ internal HttpMessage CreateGetApplicationRequest(string applicationId, int? 
time return message; } - internal HttpMessage CreateGetPoolUsageMetricsRequest(int? timeOutInSeconds, DateTimeOffset? ocpdate, int? maxresults, DateTimeOffset? starttime, DateTimeOffset? endtime, string filter, RequestContext context) + internal HttpMessage CreateGetPoolUsageMetricsRequest(int? timeOutInSeconds, DateTimeOffset? ocpdate, int? maxresults, DateTimeOffset? startTime, DateTimeOffset? endtime, string filter, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); var request = message.Request; @@ -9240,9 +9644,9 @@ internal HttpMessage CreateGetPoolUsageMetricsRequest(int? timeOutInSeconds, Dat { uri.AppendQuery("maxresults", maxresults.Value, true); } - if (starttime != null) + if (startTime != null) { - uri.AppendQuery("startTime", starttime.Value, "O", true); + uri.AppendQuery("startTime", startTime.Value, "O", true); } if (endtime != null) { @@ -9727,7 +10131,7 @@ internal HttpMessage CreateGetPoolNodeCountsRequest(int? timeOutInSeconds, DateT return message; } - internal HttpMessage CreateDeleteJobRequest(string jobId, int? timeOutInSeconds, DateTimeOffset? ocpdate, RequestConditions requestConditions, RequestContext context) + internal HttpMessage CreateDeleteJobRequest(string jobId, int? timeOutInSeconds, DateTimeOffset? ocpdate, bool? force, RequestConditions requestConditions, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier202); var request = message.Request; @@ -9741,6 +10145,10 @@ internal HttpMessage CreateDeleteJobRequest(string jobId, int? timeOutInSeconds, { uri.AppendQuery("timeOut", timeOutInSeconds.Value, true); } + if (force != null) + { + uri.AppendQuery("force", force.Value, true); + } request.Uri = uri; request.Headers.Add("Accept", "application/json"); request.Headers.Add("client-request-id", message.Request.ClientRequestId); @@ -9917,7 +10325,7 @@ internal HttpMessage CreateEnableJobRequest(string jobId, int? timeOutInSeconds, return message; } - internal HttpMessage CreateTerminateJobRequest(string jobId, RequestContent content, int? timeOutInSeconds, DateTimeOffset? ocpdate, RequestConditions requestConditions, RequestContext context) + internal HttpMessage CreateTerminateJobRequest(string jobId, RequestContent content, int? timeOutInSeconds, DateTimeOffset? ocpdate, bool? force, RequestConditions requestConditions, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier202); var request = message.Request; @@ -9932,6 +10340,10 @@ internal HttpMessage CreateTerminateJobRequest(string jobId, RequestContent cont { uri.AppendQuery("timeOut", timeOutInSeconds.Value, true); } + if (force != null) + { + uri.AppendQuery("force", force.Value, true); + } request.Uri = uri; request.Headers.Add("Accept", "application/json"); request.Headers.Add("client-request-id", message.Request.ClientRequestId); @@ -10150,7 +10562,7 @@ internal HttpMessage CreateJobScheduleExistsRequest(string jobScheduleId, int? t return message; } - internal HttpMessage CreateDeleteJobScheduleRequest(string jobScheduleId, int? timeOutInSeconds, DateTimeOffset? ocpdate, RequestConditions requestConditions, RequestContext context) + internal HttpMessage CreateDeleteJobScheduleRequest(string jobScheduleId, int? timeOutInSeconds, DateTimeOffset? ocpdate, bool? 
force, RequestConditions requestConditions, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier202); var request = message.Request; @@ -10164,6 +10576,10 @@ internal HttpMessage CreateDeleteJobScheduleRequest(string jobScheduleId, int? t { uri.AppendQuery("timeOut", timeOutInSeconds.Value, true); } + if (force != null) + { + uri.AppendQuery("force", force.Value, true); + } request.Uri = uri; request.Headers.Add("Accept", "application/json"); request.Headers.Add("client-request-id", message.Request.ClientRequestId); @@ -10338,7 +10754,7 @@ internal HttpMessage CreateEnableJobScheduleRequest(string jobScheduleId, int? t return message; } - internal HttpMessage CreateTerminateJobScheduleRequest(string jobScheduleId, int? timeOutInSeconds, DateTimeOffset? ocpdate, RequestConditions requestConditions, RequestContext context) + internal HttpMessage CreateTerminateJobScheduleRequest(string jobScheduleId, int? timeOutInSeconds, DateTimeOffset? ocpdate, bool? force, RequestConditions requestConditions, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier202); var request = message.Request; @@ -10353,6 +10769,10 @@ internal HttpMessage CreateTerminateJobScheduleRequest(string jobScheduleId, int { uri.AppendQuery("timeOut", timeOutInSeconds.Value, true); } + if (force != null) + { + uri.AppendQuery("force", force.Value, true); + } request.Uri = uri; request.Headers.Add("Accept", "application/json"); request.Headers.Add("client-request-id", message.Request.ClientRequestId); @@ -11025,6 +11445,94 @@ internal HttpMessage CreateRebootNodeRequest(string poolId, string nodeId, Reque return message; } + internal HttpMessage CreateStartNodeRequest(string poolId, string nodeId, int? timeOutInSeconds, DateTimeOffset? ocpdate, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/pools/", false); + uri.AppendPath(poolId, true); + uri.AppendPath("/nodes/", false); + uri.AppendPath(nodeId, true); + uri.AppendPath("/start", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (timeOutInSeconds != null) + { + uri.AppendQuery("timeOut", timeOutInSeconds.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("client-request-id", message.Request.ClientRequestId); + request.Headers.Add("return-client-request-id", "true"); + if (ocpdate != null) + { + request.Headers.Add("ocp-date", ocpdate.Value, "R"); + } + return message; + } + + internal HttpMessage CreateReimageNodeRequest(string poolId, string nodeId, RequestContent content, int? timeOutInSeconds, DateTimeOffset? 
ocpdate, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/pools/", false); + uri.AppendPath(poolId, true); + uri.AppendPath("/nodes/", false); + uri.AppendPath(nodeId, true); + uri.AppendPath("/reimage", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (timeOutInSeconds != null) + { + uri.AppendQuery("timeOut", timeOutInSeconds.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("client-request-id", message.Request.ClientRequestId); + request.Headers.Add("return-client-request-id", "true"); + if (ocpdate != null) + { + request.Headers.Add("ocp-date", ocpdate.Value, "R"); + } + request.Headers.Add("Content-Type", "application/json; odata=minimalmetadata"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeallocateNodeRequest(string poolId, string nodeId, RequestContent content, int? timeOutInSeconds, DateTimeOffset? ocpdate, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/pools/", false); + uri.AppendPath(poolId, true); + uri.AppendPath("/nodes/", false); + uri.AppendPath(nodeId, true); + uri.AppendPath("/deallocate", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (timeOutInSeconds != null) + { + uri.AppendQuery("timeOut", timeOutInSeconds.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("client-request-id", message.Request.ClientRequestId); + request.Headers.Add("return-client-request-id", "true"); + if (ocpdate != null) + { + request.Headers.Add("ocp-date", ocpdate.Value, "R"); + } + request.Headers.Add("Content-Type", "application/json; odata=minimalmetadata"); + request.Content = content; + return message; + } + internal HttpMessage CreateDisableNodeSchedulingRequest(string poolId, string nodeId, RequestContent content, int? timeOutInSeconds, DateTimeOffset? ocpdate, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); @@ -11410,7 +11918,7 @@ internal HttpMessage CreateGetApplicationsNextPageRequest(string nextLink, int? return message; } - internal HttpMessage CreateGetPoolUsageMetricsNextPageRequest(string nextLink, int? timeOutInSeconds, DateTimeOffset? ocpdate, int? maxresults, DateTimeOffset? starttime, DateTimeOffset? endtime, string filter, RequestContext context) + internal HttpMessage CreateGetPoolUsageMetricsNextPageRequest(string nextLink, int? timeOutInSeconds, DateTimeOffset? ocpdate, int? maxresults, DateTimeOffset? startTime, DateTimeOffset? endtime, string filter, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); var request = message.Request; diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchClientOptions.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchClientOptions.cs index eab2e5942414c..c4f6c163cc57d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchClientOptions.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchClientOptions.cs @@ -13,13 +13,13 @@ namespace Azure.Compute.Batch /// Client options for BatchClient. 
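Usage sketch (editorial, not part of the generated source): the node start, reimage, and deallocate operations and the force query parameter above require the 2024-07-01.20.0 service version, which the updated BatchClientOptions below defaults to. Pinning the version explicitly looks roughly like this; the endpoint is a placeholder, and the (endpoint, credential, options) constructor together with Azure.Identity's DefaultAzureCredential are assumptions.
var options = new BatchClientOptions(BatchClientOptions.ServiceVersion.V2024_07_01_20_0); // pin the new API version explicitly
var batchClient = new BatchClient(new Uri("https://myaccount.eastus.batch.azure.com"), // placeholder Batch account endpoint
    new DefaultAzureCredential(), // requires the Azure.Identity package
    options);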
public partial class BatchClientOptions : ClientOptions { - private const ServiceVersion LatestVersion = ServiceVersion.V2024_02_01_19_0; + private const ServiceVersion LatestVersion = ServiceVersion.V2024_07_01_20_0; /// The version of the service to use. public enum ServiceVersion { - /// Service version "2024-02-01.19.0". - V2024_02_01_19_0 = 1, + /// Service version "2024-07-01.20.0". + V2024_07_01_20_0 = 1, } internal string Version { get; } @@ -29,7 +29,7 @@ public BatchClientOptions(ServiceVersion version = LatestVersion) { Version = version switch { - ServiceVersion.V2024_02_01_19_0 => "2024-02-01.19.0", + ServiceVersion.V2024_07_01_20_0 => "2024-07-01.20.0", _ => throw new NotSupportedException() }; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchError.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchError.Serialization.cs index 116c883bb17c8..e4028a6aa79b3 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchError.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchError.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchError : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOption throw new FormatException($"The model {nameof(BatchError)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("code"u8); writer.WriteStringValue(Code); if (Optional.IsDefined(Message)) @@ -58,7 +66,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOption #endif } } - writer.WriteEndObject(); } BatchError IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchErrorDetail.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchErrorDetail.Serialization.cs index d00a4bf014bf3..b8d1be12f3c5c 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchErrorDetail.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchErrorDetail.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchErrorDetail : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter throw new FormatException($"The model {nameof(BatchErrorDetail)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Key)) { writer.WritePropertyName("key"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter #endif } } - writer.WriteEndObject(); } BatchErrorDetail IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchErrorMessage.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchErrorMessage.Serialization.cs index cb1c107a40ccc..efa86a4eba2a3 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchErrorMessage.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchErrorMessage.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchErrorMessage : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrite throw new FormatException($"The model {nameof(BatchErrorMessage)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Lang)) { writer.WritePropertyName("lang"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrite #endif } } - writer.WriteEndObject(); } BatchErrorMessage IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJob.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJob.Serialization.cs index 70ba89ca2ada5..069986fe70d75 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJob.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJob.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJob : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions throw new FormatException($"The model {nameof(BatchJob)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (options.Format != "W" && Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -178,7 +186,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions #endif } } - writer.WriteEndObject(); } BatchJob IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJob.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJob.cs index b516fefc12923..3084acc369287 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJob.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJob.cs @@ -83,7 +83,7 @@ public BatchJob(BatchPoolInfo poolInfo) /// The network configuration for the Job. /// A list of name-value pairs associated with the Job as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. /// The execution information for the Job. - /// Resource usage statistics for the entire lifetime of the Job. This property is populated only if the CloudJob was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. + /// Resource usage statistics for the entire lifetime of the Job. This property is populated only if the BatchJob was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. /// Keeps track of any properties unknown to the library. internal BatchJob(string id, string displayName, bool? usesTaskDependencies, string url, string eTag, DateTimeOffset? lastModified, DateTimeOffset? creationTime, BatchJobState? state, DateTimeOffset? stateTransitionTime, BatchJobState? previousState, DateTimeOffset? previousStateTransitionTime, int? priority, bool? allowTaskPreemption, int? maxParallelTasks, BatchJobConstraints constraints, BatchJobManagerTask jobManagerTask, BatchJobPreparationTask jobPreparationTask, BatchJobReleaseTask jobReleaseTask, IReadOnlyList commonEnvironmentSettings, BatchPoolInfo poolInfo, OnAllBatchTasksComplete? onAllTasksComplete, OnBatchTaskFailure? onTaskFailure, BatchJobNetworkConfiguration networkConfiguration, IList metadata, BatchJobExecutionInfo executionInfo, BatchJobStatistics stats, IDictionary serializedAdditionalRawData) { @@ -171,7 +171,7 @@ internal BatchJob() public IList Metadata { get; } /// The execution information for the Job. public BatchJobExecutionInfo ExecutionInfo { get; } - /// Resource usage statistics for the entire lifetime of the Job. This property is populated only if the CloudJob was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. + /// Resource usage statistics for the entire lifetime of the Job. This property is populated only if the BatchJob was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. 
The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. public BatchJobStatistics Stats { get; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobConstraints.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobConstraints.Serialization.cs index bfe4dceb06631..605d3fe777830 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobConstraints.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobConstraints.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobConstraints : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(BatchJobConstraints)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(MaxWallClockTime)) { writer.WritePropertyName("maxWallClockTime"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri #endif } } - writer.WriteEndObject(); } BatchJobConstraints IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobCreateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobCreateContent.Serialization.cs index b091245b29d55..0d3c1dfeccb1e 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobCreateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobCreateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobCreateContent : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(BatchJobCreateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("id"u8); writer.WriteStringValue(Id); if (Optional.IsDefined(DisplayName)) @@ -125,7 +133,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } BatchJobCreateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobDisableContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobDisableContent.Serialization.cs index c9728fe091e73..a195a1746a304 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobDisableContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobDisableContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobDisableContent : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchJobDisableContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("disableTasks"u8); writer.WriteStringValue(DisableTasks.ToString()); if (options.Format != "W" && _serializedAdditionalRawData != null) @@ -43,7 +51,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchJobDisableContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobExecutionInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobExecutionInfo.Serialization.cs index 474ef422c50ed..b15b497c27e01 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobExecutionInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobExecutionInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobExecutionInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(BatchJobExecutionInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("startTime"u8); writer.WriteStringValue(StartTime, "O"); if (Optional.IsDefined(EndTime)) @@ -63,7 +71,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } BatchJobExecutionInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobManagerTask.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobManagerTask.Serialization.cs index de8469b51cc05..1a6bb39a6fa59 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobManagerTask.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobManagerTask.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobManagerTask : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(BatchJobManagerTask)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("id"u8); writer.WriteStringValue(Id); if (Optional.IsDefined(DisplayName)) @@ -130,7 +138,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri #endif } } - writer.WriteEndObject(); } BatchJobManagerTask IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobManagerTask.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobManagerTask.cs index 7f679bb2fb593..ef3f26a7fbaac 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobManagerTask.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobManagerTask.cs @@ -71,7 +71,7 @@ public partial class BatchJobManagerTask /// Initializes a new instance of . /// A string that uniquely identifies the Job Manager Task within the Job. The ID can contain any combination of alphanumeric characters including hyphens and underscores and cannot contain more than 64 characters. - /// The command line of the Job Manager Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). 
+ /// The command line of the Job Manager Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// or is null. public BatchJobManagerTask(string id, string commandLine) { @@ -89,7 +89,7 @@ public BatchJobManagerTask(string id, string commandLine) /// Initializes a new instance of . /// A string that uniquely identifies the Job Manager Task within the Job. The ID can contain any combination of alphanumeric characters including hyphens and underscores and cannot contain more than 64 characters. /// The display name of the Job Manager Task. It need not be unique and can contain any Unicode characters up to a maximum length of 1024. - /// The command line of the Job Manager Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Job Manager Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// The settings for the container under which the Job Manager Task runs. If the Pool that will run this Task has containerConfiguration set, this must be set as well. If the Pool that will run this Task doesn't have containerConfiguration set, this must not be set. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. /// A list of files that the Batch service will download to the Compute Node before running the command line. Files listed under this element are located in the Task's working directory. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. 
This can be achieved using .zip files, Application Packages, or Docker Containers. /// A list of files that the Batch service will upload from the Compute Node after running the command line. For multi-instance Tasks, the files will only be uploaded from the Compute Node on which the primary Task is executed. @@ -141,7 +141,7 @@ internal BatchJobManagerTask() public string Id { get; set; } /// The display name of the Job Manager Task. It need not be unique and can contain any Unicode characters up to a maximum length of 1024. public string DisplayName { get; set; } - /// The command line of the Job Manager Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Job Manager Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). public string CommandLine { get; set; } /// The settings for the container under which the Job Manager Task runs. If the Pool that will run this Task has containerConfiguration set, this must be set as well. If the Pool that will run this Task doesn't have containerConfiguration set, this must not be set. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. public BatchTaskContainerSettings ContainerSettings { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobNetworkConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobNetworkConfiguration.Serialization.cs index 9345163de4357..793ede70b803f 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobNetworkConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobNetworkConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobNetworkConfiguration : IUtf8JsonSerializable, IJson void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. 
+ /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,9 +34,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model throw new FormatException($"The model {nameof(BatchJobNetworkConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("subnetId"u8); writer.WriteStringValue(SubnetId); + writer.WritePropertyName("skipWithdrawFromVNet"u8); + writer.WriteBooleanValue(SkipWithdrawFromVNet); if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -43,7 +53,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model #endif } } - writer.WriteEndObject(); } BatchJobNetworkConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) @@ -67,6 +76,7 @@ internal static BatchJobNetworkConfiguration DeserializeBatchJobNetworkConfigura return null; } string subnetId = default; + bool skipWithdrawFromVNet = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -76,13 +86,18 @@ internal static BatchJobNetworkConfiguration DeserializeBatchJobNetworkConfigura subnetId = property.Value.GetString(); continue; } + if (property.NameEquals("skipWithdrawFromVNet"u8)) + { + skipWithdrawFromVNet = property.Value.GetBoolean(); + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } } serializedAdditionalRawData = rawDataDictionary; - return new BatchJobNetworkConfiguration(subnetId, serializedAdditionalRawData); + return new BatchJobNetworkConfiguration(subnetId, skipWithdrawFromVNet, serializedAdditionalRawData); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobNetworkConfiguration.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobNetworkConfiguration.cs index d6697158bf5e3..c00f0c2ef2e43 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobNetworkConfiguration.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobNetworkConfiguration.cs @@ -46,21 +46,25 @@ public partial class BatchJobNetworkConfiguration private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The ARM resource identifier of the virtual network subnet which Compute Nodes running Tasks from the Job will join for the duration of the Task. This will only work with a VirtualMachineConfiguration Pool. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes which will run Tasks from the Job. This can be up to the number of Compute Nodes in the Pool. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet so that Azure Batch service can schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). 
If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. This is of the form /subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication from the Azure Batch service. For Pools created with a Virtual Machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Port 443 is also required to be open for outbound connections for communications to Azure Storage. For more details see: https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. + /// The ARM resource identifier of the virtual network subnet which Compute Nodes running Tasks from the Job will join for the duration of the Task. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes which will run Tasks from the Job. This can be up to the number of Compute Nodes in the Pool. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet so that Azure Batch service can schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. This is of the form /subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication from the Azure Batch service. For Pools created with a Virtual Machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Port 443 is also required to be open for outbound connections for communications to Azure Storage. For more details see: https://docs.microsoft.com/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. + /// Whether to withdraw Compute Nodes from the virtual network to DNC when the job is terminated or deleted. If true, nodes will remain joined to the virtual network to DNC. If false, nodes will automatically withdraw when the job ends. Defaults to false. /// is null. - public BatchJobNetworkConfiguration(string subnetId) + public BatchJobNetworkConfiguration(string subnetId, bool skipWithdrawFromVNet) { Argument.AssertNotNull(subnetId, nameof(subnetId)); SubnetId = subnetId; + SkipWithdrawFromVNet = skipWithdrawFromVNet; } /// Initializes a new instance of . - /// The ARM resource identifier of the virtual network subnet which Compute Nodes running Tasks from the Job will join for the duration of the Task. This will only work with a VirtualMachineConfiguration Pool. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes which will run Tasks from the Job. This can be up to the number of Compute Nodes in the Pool. 
The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet so that Azure Batch service can schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. This is of the form /subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication from the Azure Batch service. For Pools created with a Virtual Machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Port 443 is also required to be open for outbound connections for communications to Azure Storage. For more details see: https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. + /// The ARM resource identifier of the virtual network subnet which Compute Nodes running Tasks from the Job will join for the duration of the Task. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes which will run Tasks from the Job. This can be up to the number of Compute Nodes in the Pool. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet so that Azure Batch service can schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. This is of the form /subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication from the Azure Batch service. For Pools created with a Virtual Machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Port 443 is also required to be open for outbound connections for communications to Azure Storage. For more details see: https://docs.microsoft.com/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. + /// Whether to withdraw Compute Nodes from the virtual network to DNC when the job is terminated or deleted. If true, nodes will remain joined to the virtual network to DNC. If false, nodes will automatically withdraw when the job ends. Defaults to false. /// Keeps track of any properties unknown to the library. 
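// --- Illustrative aside (not part of the generated diff) ---
// A minimal sketch, assuming typical usage, of calling the updated BatchJobNetworkConfiguration
// constructor now that skipWithdrawFromVNet is a required parameter alongside subnetId.
// The subnet resource ID below is the placeholder form from the doc comment, not a real value.
var jobNetworkConfiguration = new BatchJobNetworkConfiguration(
    subnetId: "/subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}",
    skipWithdrawFromVNet: false); // false: Compute Nodes withdraw from the VNet when the job ends (the documented default)
// The value remains adjustable afterwards via the new settable property:
jobNetworkConfiguration.SkipWithdrawFromVNet = true;
// --- End illustrative aside ---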
- internal BatchJobNetworkConfiguration(string subnetId, IDictionary serializedAdditionalRawData) + internal BatchJobNetworkConfiguration(string subnetId, bool skipWithdrawFromVNet, IDictionary serializedAdditionalRawData) { SubnetId = subnetId; + SkipWithdrawFromVNet = skipWithdrawFromVNet; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -69,7 +73,9 @@ internal BatchJobNetworkConfiguration() { } - /// The ARM resource identifier of the virtual network subnet which Compute Nodes running Tasks from the Job will join for the duration of the Task. This will only work with a VirtualMachineConfiguration Pool. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes which will run Tasks from the Job. This can be up to the number of Compute Nodes in the Pool. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet so that Azure Batch service can schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. This is of the form /subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication from the Azure Batch service. For Pools created with a Virtual Machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Port 443 is also required to be open for outbound connections for communications to Azure Storage. For more details see: https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. + /// The ARM resource identifier of the virtual network subnet which Compute Nodes running Tasks from the Job will join for the duration of the Task. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes which will run Tasks from the Job. This can be up to the number of Compute Nodes in the Pool. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet so that Azure Batch service can schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. This is of the form /subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication from the Azure Batch service. For Pools created with a Virtual Machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Port 443 is also required to be open for outbound connections for communications to Azure Storage. 
For more details see: https://docs.microsoft.com/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. public string SubnetId { get; set; } + /// Whether to withdraw Compute Nodes from the virtual network to DNC when the job is terminated or deleted. If true, nodes will remain joined to the virtual network to DNC. If false, nodes will automatically withdraw when the job ends. Defaults to false. + public bool SkipWithdrawFromVNet { get; set; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationAndReleaseTaskStatus.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationAndReleaseTaskStatus.Serialization.cs index ed82f3ea661e3..852887c7dbeab 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationAndReleaseTaskStatus.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationAndReleaseTaskStatus.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobPreparationAndReleaseTaskStatus : IUtf8JsonSerializ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter wr throw new FormatException($"The model {nameof(BatchJobPreparationAndReleaseTaskStatus)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(PoolId)) { writer.WritePropertyName("poolId"u8); @@ -66,7 +74,6 @@ void IJsonModel.Write(Utf8JsonWriter wr #endif } } - writer.WriteEndObject(); } BatchJobPreparationAndReleaseTaskStatus IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTask.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTask.Serialization.cs index 86bd6b822b3b6..f8398b15fdb26 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTask.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTask.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobPreparationTask : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReade throw new FormatException($"The model {nameof(BatchJobPreparationTask)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -93,7 +101,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReade #endif } } - writer.WriteEndObject(); } BatchJobPreparationTask IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTask.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTask.cs index ce02002b17a04..8e81292906839 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTask.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTask.cs @@ -73,7 +73,7 @@ public partial class BatchJobPreparationTask private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The command line of the Job Preparation Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Job Preparation Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// is null. public BatchJobPreparationTask(string commandLine) { @@ -86,7 +86,7 @@ public BatchJobPreparationTask(string commandLine) /// Initializes a new instance of . /// A string that uniquely identifies the Job Preparation Task within the Job. The ID can contain any combination of alphanumeric characters including hyphens and underscores and cannot contain more than 64 characters. If you do not specify this property, the Batch service assigns a default value of 'jobpreparation'. No other Task in the Job can have the same ID as the Job Preparation Task. If you try to submit a Task with the same id, the Batch service rejects the request with error code TaskIdSameAsJobPreparationTask; if you are calling the REST API directly, the HTTP status code is 409 (Conflict). - /// The command line of the Job Preparation Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. 
If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Job Preparation Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// The settings for the container under which the Job Preparation Task runs. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. /// A list of files that the Batch service will download to the Compute Node before running the command line. Files listed under this element are located in the Task's working directory. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. /// A list of environment variable settings for the Job Preparation Task. @@ -116,7 +116,7 @@ internal BatchJobPreparationTask() /// A string that uniquely identifies the Job Preparation Task within the Job. The ID can contain any combination of alphanumeric characters including hyphens and underscores and cannot contain more than 64 characters. If you do not specify this property, the Batch service assigns a default value of 'jobpreparation'. No other Task in the Job can have the same ID as the Job Preparation Task. If you try to submit a Task with the same id, the Batch service rejects the request with error code TaskIdSameAsJobPreparationTask; if you are calling the REST API directly, the HTTP status code is 409 (Conflict). public string Id { get; set; } - /// The command line of the Job Preparation Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Job Preparation Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. 
If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). public string CommandLine { get; set; } /// The settings for the container under which the Job Preparation Task runs. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. public BatchTaskContainerSettings ContainerSettings { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTaskExecutionInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTaskExecutionInfo.Serialization.cs index 96e55f511fcb8..6498826b59188 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTaskExecutionInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobPreparationTaskExecutionInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobPreparationTaskExecutionInfo : IUtf8JsonSerializabl void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter write throw new FormatException($"The model {nameof(BatchJobPreparationTaskExecutionInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("startTime"u8); writer.WriteStringValue(StartTime, "O"); if (Optional.IsDefined(EndTime)) @@ -87,7 +95,6 @@ void IJsonModel.Write(Utf8JsonWriter write #endif } } - writer.WriteEndObject(); } BatchJobPreparationTaskExecutionInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTask.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTask.Serialization.cs index 22d13533c3926..7c385f4b70f10 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTask.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTask.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobReleaseTask : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(BatchJobReleaseTask)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -88,7 +96,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri #endif } } - writer.WriteEndObject(); } BatchJobReleaseTask IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTask.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTask.cs index acbf28a11f7d3..ac84972b80ef1 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTask.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTask.cs @@ -63,7 +63,7 @@ public partial class BatchJobReleaseTask private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The command line of the Job Release Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Job Release Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. 
If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// is null. public BatchJobReleaseTask(string commandLine) { @@ -76,7 +76,7 @@ public BatchJobReleaseTask(string commandLine) /// Initializes a new instance of . /// A string that uniquely identifies the Job Release Task within the Job. The ID can contain any combination of alphanumeric characters including hyphens and underscores and cannot contain more than 64 characters. If you do not specify this property, the Batch service assigns a default value of 'jobrelease'. No other Task in the Job can have the same ID as the Job Release Task. If you try to submit a Task with the same id, the Batch service rejects the request with error code TaskIdSameAsJobReleaseTask; if you are calling the REST API directly, the HTTP status code is 409 (Conflict). - /// The command line of the Job Release Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Job Release Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// The settings for the container under which the Job Release Task runs. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. /// A list of files that the Batch service will download to the Compute Node before running the command line. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. Files listed under this element are located in the Task's working directory. /// A list of environment variable settings for the Job Release Task. 
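// --- Illustrative aside (not part of the generated diff) ---
// A minimal sketch of the command-line guidance in the doc comments above: the command line
// does not run under a shell, so shell features (variable expansion, pipes) must be requested
// explicitly by wrapping the command, e.g. with "/bin/sh -c" on Linux. The echo command is a
// placeholder; AZ_BATCH_JOB_ID is one of the Batch-provided environment variables referenced
// in the linked documentation.
var jobReleaseTask = new BatchJobReleaseTask("/bin/sh -c \"echo Job $AZ_BATCH_JOB_ID released\"")
{
    Id = "jobrelease" // optional; this is the documented default ID assigned by the service
};
// --- End illustrative aside ---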
@@ -104,7 +104,7 @@ internal BatchJobReleaseTask() /// A string that uniquely identifies the Job Release Task within the Job. The ID can contain any combination of alphanumeric characters including hyphens and underscores and cannot contain more than 64 characters. If you do not specify this property, the Batch service assigns a default value of 'jobrelease'. No other Task in the Job can have the same ID as the Job Release Task. If you try to submit a Task with the same id, the Batch service rejects the request with error code TaskIdSameAsJobReleaseTask; if you are calling the REST API directly, the HTTP status code is 409 (Conflict). public string Id { get; set; } - /// The command line of the Job Release Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Job Release Task. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). public string CommandLine { get; set; } /// The settings for the container under which the Job Release Task runs. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. public BatchTaskContainerSettings ContainerSettings { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTaskExecutionInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTaskExecutionInfo.Serialization.cs index af2ca9be37313..e0892429563bd 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTaskExecutionInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobReleaseTaskExecutionInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobReleaseTaskExecutionInfo : IUtf8JsonSerializable, I void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, M throw new FormatException($"The model {nameof(BatchJobReleaseTaskExecutionInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("startTime"u8); writer.WriteStringValue(StartTime, "O"); if (Optional.IsDefined(EndTime)) @@ -80,7 +88,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, M #endif } } - writer.WriteEndObject(); } BatchJobReleaseTaskExecutionInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSchedule.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSchedule.Serialization.cs index 9ffbc11fee3dd..8195a7616034d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSchedule.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSchedule.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobSchedule : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter throw new FormatException($"The model {nameof(BatchJobSchedule)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (options.Format != "W" && Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -118,7 +126,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter #endif } } - writer.WriteEndObject(); } BatchJobSchedule IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleConfiguration.Serialization.cs index 9565ce4d9fb8f..d8b43da275992 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobScheduleConfiguration : IUtf8JsonSerializable, IJso void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode throw new FormatException($"The model {nameof(BatchJobScheduleConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(DoNotRunUntil)) { writer.WritePropertyName("doNotRunUntil"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode #endif } } - writer.WriteEndObject(); } BatchJobScheduleConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleCreateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleCreateContent.Serialization.cs index c92614a3fc9f3..c18cb66efb4b9 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleCreateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleCreateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobScheduleCreateContent : IUtf8JsonSerializable, IJso void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode throw new FormatException($"The model {nameof(BatchJobScheduleCreateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("id"u8); writer.WriteStringValue(Id); if (Optional.IsDefined(DisplayName)) @@ -62,7 +70,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode #endif } } - writer.WriteEndObject(); } BatchJobScheduleCreateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleExecutionInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleExecutionInfo.Serialization.cs index e467ae34e11d9..24ac07f4e9a60 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleExecutionInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleExecutionInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobScheduleExecutionInfo : IUtf8JsonSerializable, IJso void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode throw new FormatException($"The model {nameof(BatchJobScheduleExecutionInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(NextRunTime)) { writer.WritePropertyName("nextRunTime"u8); @@ -56,7 +64,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode #endif } } - writer.WriteEndObject(); } BatchJobScheduleExecutionInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleStatistics.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleStatistics.Serialization.cs index c5a3c95add21f..cd89b90da4605 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleStatistics.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleStatistics.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobScheduleStatistics : IUtf8JsonSerializable, IJsonMo void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe throw new FormatException($"The model {nameof(BatchJobScheduleStatistics)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("url"u8); writer.WriteStringValue(Url); writer.WritePropertyName("startTime"u8); @@ -69,7 +77,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe #endif } } - writer.WriteEndObject(); } BatchJobScheduleStatistics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleUpdateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleUpdateContent.Serialization.cs index eeea81dd97f28..677f2df5e33a8 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleUpdateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobScheduleUpdateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobScheduleUpdateContent : IUtf8JsonSerializable, IJso void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode throw new FormatException($"The model {nameof(BatchJobScheduleUpdateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Schedule)) { writer.WritePropertyName("schedule"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode #endif } } - writer.WriteEndObject(); } BatchJobScheduleUpdateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSchedulingError.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSchedulingError.Serialization.cs index cf976b1262e65..b469072a9860c 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSchedulingError.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSchedulingError.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobSchedulingError : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReade throw new FormatException($"The model {nameof(BatchJobSchedulingError)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("category"u8); writer.WriteStringValue(Category.ToString()); if (Optional.IsDefined(Code)) @@ -63,7 +71,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReade #endif } } - writer.WriteEndObject(); } BatchJobSchedulingError IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSpecification.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSpecification.Serialization.cs index c55ac93bd1225..f5b94b119f7c5 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSpecification.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobSpecification.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobSpecification : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(BatchJobSpecification)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Priority)) { writer.WritePropertyName("priority"u8); @@ -123,7 +131,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } BatchJobSpecification IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobStatistics.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobStatistics.Serialization.cs index 27a9f31eaf0c0..5e20548c7c1c4 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobStatistics.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobStatistics.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobStatistics : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(BatchJobStatistics)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("url"u8); writer.WriteStringValue(Url); writer.WritePropertyName("startTime"u8); @@ -69,7 +77,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit #endif } } - writer.WriteEndObject(); } BatchJobStatistics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobTerminateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobTerminateContent.Serialization.cs index 72f3d8bd44fa2..d6abf0f51ea57 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobTerminateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobTerminateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobTerminateContent : IUtf8JsonSerializable, IJsonMode void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead throw new FormatException($"The model {nameof(BatchJobTerminateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(TerminationReason)) { writer.WritePropertyName("terminateReason"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead #endif } } - writer.WriteEndObject(); } BatchJobTerminateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobUpdateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobUpdateContent.Serialization.cs index 6f849ee7b5853..c865fc428c1ac 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobUpdateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobUpdateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchJobUpdateContent : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(BatchJobUpdateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Priority)) { writer.WritePropertyName("priority"u8); @@ -66,6 +74,11 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW } writer.WriteEndArray(); } + if (Optional.IsDefined(NetworkConfiguration)) + { + writer.WritePropertyName("networkConfiguration"u8); + writer.WriteObjectValue(NetworkConfiguration, options); + } if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -81,7 +94,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } BatchJobUpdateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) @@ -111,6 +123,7 @@ internal static BatchJobUpdateContent DeserializeBatchJobUpdateContent(JsonEleme BatchPoolInfo poolInfo = default; OnAllBatchTasksComplete? 
onAllTasksComplete = default; IList metadata = default; + BatchJobNetworkConfiguration networkConfiguration = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -183,6 +196,15 @@ internal static BatchJobUpdateContent DeserializeBatchJobUpdateContent(JsonEleme metadata = array; continue; } + if (property.NameEquals("networkConfiguration"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + networkConfiguration = BatchJobNetworkConfiguration.DeserializeBatchJobNetworkConfiguration(property.Value, options); + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); @@ -197,6 +219,7 @@ internal static BatchJobUpdateContent DeserializeBatchJobUpdateContent(JsonEleme poolInfo, onAllTasksComplete, metadata ?? new ChangeTrackingList(), + networkConfiguration, serializedAdditionalRawData); } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobUpdateContent.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobUpdateContent.cs index 5776641d88eaf..0cbcd028e9a1c 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobUpdateContent.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchJobUpdateContent.cs @@ -59,8 +59,9 @@ public BatchJobUpdateContent() /// The Pool on which the Batch service runs the Job's Tasks. You may change the Pool for a Job only when the Job is disabled. The Patch Job call will fail if you include the poolInfo element and the Job is not disabled. If you specify an autoPoolSpecification in the poolInfo, only the keepAlive property of the autoPoolSpecification can be updated, and then only if the autoPoolSpecification has a poolLifetimeOption of Job (other job properties can be updated as normal). If omitted, the Job continues to run on its current Pool. /// The action the Batch service should take when all Tasks in the Job are in the completed state. If omitted, the completion behavior is left unchanged. You may not change the value from terminatejob to noaction - that is, once you have engaged automatic Job termination, you cannot turn it off again. If you try to do this, the request fails with an 'invalid property value' error response; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). /// A list of name-value pairs associated with the Job as metadata. If omitted, the existing Job metadata is left unchanged. + /// The network configuration for the Job. /// Keeps track of any properties unknown to the library. - internal BatchJobUpdateContent(int? priority, bool? allowTaskPreemption, int? maxParallelTasks, BatchJobConstraints constraints, BatchPoolInfo poolInfo, OnAllBatchTasksComplete? onAllTasksComplete, IList metadata, IDictionary serializedAdditionalRawData) + internal BatchJobUpdateContent(int? priority, bool? allowTaskPreemption, int? maxParallelTasks, BatchJobConstraints constraints, BatchPoolInfo poolInfo, OnAllBatchTasksComplete? onAllTasksComplete, IList metadata, BatchJobNetworkConfiguration networkConfiguration, IDictionary serializedAdditionalRawData) { Priority = priority; AllowTaskPreemption = allowTaskPreemption; @@ -69,6 +70,7 @@ internal BatchJobUpdateContent(int? priority, bool? allowTaskPreemption, int? 
ma PoolInfo = poolInfo; OnAllTasksComplete = onAllTasksComplete; Metadata = metadata; + NetworkConfiguration = networkConfiguration; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -86,5 +88,7 @@ internal BatchJobUpdateContent(int? priority, bool? allowTaskPreemption, int? ma public OnAllBatchTasksComplete? OnAllTasksComplete { get; set; } /// A list of name-value pairs associated with the Job as metadata. If omitted, the existing Job metadata is left unchanged. public IList Metadata { get; } + /// The network configuration for the Job. + public BatchJobNetworkConfiguration NetworkConfiguration { get; set; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNode.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNode.Serialization.cs index f37cee7103fa6..a42066854effa 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNode.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNode.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNode : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions throw new FormatException($"The model {nameof(BatchNode)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -161,7 +169,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions #endif } } - writer.WriteEndObject(); } BatchNode IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeAgentInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeAgentInfo.Serialization.cs index 7ba74ff308f94..1619e36e13fed 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeAgentInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeAgentInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeAgentInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
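The other substantive change in BatchJobUpdateContent, just above, is the new optional NetworkConfiguration property, serialized as "networkConfiguration" only when set. A minimal sketch of building such a patch payload follows; the helper name is illustrative, and the BatchJobNetworkConfiguration instance is taken as a parameter because its own constructor is not part of this hunk.

using System;
using System.ClientModel.Primitives;
using Azure.Compute.Batch;

public static class JobUpdateNetworkConfigSample
{
    // 'jobNetworkConfiguration' is assumed to be built elsewhere in the PR.
    public static BinaryData BuildUpdatePayload(BatchJobNetworkConfiguration jobNetworkConfiguration)
    {
        var update = new BatchJobUpdateContent
        {
            // New in this change; the property is omitted from the JSON when left null.
            NetworkConfiguration = jobNetworkConfiguration,
        };

        // Serializes through the model's IJsonModel implementation; the payload now carries
        // a "networkConfiguration" object.
        return ModelReaderWriter.Write(update, new ModelReaderWriterOptions("J"));
    }
}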
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(BatchNodeAgentInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("version"u8); writer.WriteStringValue(Version); writer.WritePropertyName("lastUpdateTime"u8); @@ -45,7 +53,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit #endif } } - writer.WriteEndObject(); } BatchNodeAgentInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeCounts.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeCounts.Serialization.cs index 97f6bfb824bfe..10b647dc8bafd 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeCounts.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeCounts.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeCounts : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO throw new FormatException($"The model {nameof(BatchNodeCounts)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("creating"u8); writer.WriteNumberValue(Creating); writer.WritePropertyName("idle"u8); @@ -52,6 +60,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO writer.WriteNumberValue(Unusable); writer.WritePropertyName("waitingForStartTask"u8); writer.WriteNumberValue(WaitingForStartTask); + writer.WritePropertyName("deallocated"u8); + writer.WriteNumberValue(Deallocated); + writer.WritePropertyName("deallocating"u8); + writer.WriteNumberValue(Deallocating); writer.WritePropertyName("total"u8); writer.WriteNumberValue(Total); writer.WritePropertyName("upgradingOS"u8); @@ -71,7 +83,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO #endif } } - writer.WriteEndObject(); } BatchNodeCounts IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) @@ -107,6 +118,8 @@ internal static BatchNodeCounts DeserializeBatchNodeCounts(JsonElement element, int unknown = default; int unusable = default; int waitingForStartTask = default; + int deallocated = default; + int deallocating = default; int total = default; int upgradingOS = default; IDictionary serializedAdditionalRawData = default; @@ -178,6 +191,16 @@ internal static BatchNodeCounts DeserializeBatchNodeCounts(JsonElement element, waitingForStartTask = property.Value.GetInt32(); continue; } + if (property.NameEquals("deallocated"u8)) + { + deallocated = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("deallocating"u8)) + { + deallocating = property.Value.GetInt32(); + continue; + } if (property.NameEquals("total"u8)) { total = property.Value.GetInt32(); @@ -208,6 +231,8 
@@ internal static BatchNodeCounts DeserializeBatchNodeCounts(JsonElement element, unknown, unusable, waitingForStartTask, + deallocated, + deallocating, total, upgradingOS, serializedAdditionalRawData); diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeCounts.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeCounts.cs index 56ccd9fa23738..04e5b38c071d2 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeCounts.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeCounts.cs @@ -59,9 +59,11 @@ public partial class BatchNodeCounts /// The number of Compute Nodes in the unknown state. /// The number of Compute Nodes in the unusable state. /// The number of Compute Nodes in the waitingForStartTask state. + /// The number of Compute Nodes in the deallocated state. + /// The number of Compute Nodes in the deallocating state. /// The total number of Compute Nodes. /// The number of Compute Nodes in the upgradingOS state. - internal BatchNodeCounts(int creating, int idle, int offline, int preempted, int rebooting, int reimaging, int running, int starting, int startTaskFailed, int leavingPool, int unknown, int unusable, int waitingForStartTask, int total, int upgradingOs) + internal BatchNodeCounts(int creating, int idle, int offline, int preempted, int rebooting, int reimaging, int running, int starting, int startTaskFailed, int leavingPool, int unknown, int unusable, int waitingForStartTask, int deallocated, int deallocating, int total, int upgradingOs) { Creating = creating; Idle = idle; @@ -76,6 +78,8 @@ internal BatchNodeCounts(int creating, int idle, int offline, int preempted, int Unknown = unknown; Unusable = unusable; WaitingForStartTask = waitingForStartTask; + Deallocated = deallocated; + Deallocating = deallocating; Total = total; UpgradingOs = upgradingOs; } @@ -94,10 +98,12 @@ internal BatchNodeCounts(int creating, int idle, int offline, int preempted, int /// The number of Compute Nodes in the unknown state. /// The number of Compute Nodes in the unusable state. /// The number of Compute Nodes in the waitingForStartTask state. + /// The number of Compute Nodes in the deallocated state. + /// The number of Compute Nodes in the deallocating state. /// The total number of Compute Nodes. /// The number of Compute Nodes in the upgradingOS state. /// Keeps track of any properties unknown to the library. - internal BatchNodeCounts(int creating, int idle, int offline, int preempted, int rebooting, int reimaging, int running, int starting, int startTaskFailed, int leavingPool, int unknown, int unusable, int waitingForStartTask, int total, int upgradingOs, IDictionary serializedAdditionalRawData) + internal BatchNodeCounts(int creating, int idle, int offline, int preempted, int rebooting, int reimaging, int running, int starting, int startTaskFailed, int leavingPool, int unknown, int unusable, int waitingForStartTask, int deallocated, int deallocating, int total, int upgradingOs, IDictionary serializedAdditionalRawData) { Creating = creating; Idle = idle; @@ -112,6 +118,8 @@ internal BatchNodeCounts(int creating, int idle, int offline, int preempted, int Unknown = unknown; Unusable = unusable; WaitingForStartTask = waitingForStartTask; + Deallocated = deallocated; + Deallocating = deallocating; Total = total; UpgradingOs = upgradingOs; _serializedAdditionalRawData = serializedAdditionalRawData; @@ -148,6 +156,10 @@ internal BatchNodeCounts() public int Unusable { get; } /// The number of Compute Nodes in the waitingForStartTask state. 
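BatchNodeCounts now reports the deallocated and deallocating node states alongside the existing per-state counters. As a sketch, the fragment below feeds a hand-written wire payload back through ModelReaderWriter and reads the two new counts (the numbers are invented; counters missing from the JSON simply keep their default of zero).

using System;
using System.ClientModel.Primitives;
using Azure.Compute.Batch;

public static class NodeCountsSample
{
    public static void Run()
    {
        // Minimal example payload; real responses include every state the service reports.
        BinaryData json = BinaryData.FromString(
            "{\"creating\":0,\"idle\":4,\"offline\":0,\"preempted\":0,\"rebooting\":0,\"reimaging\":0," +
            "\"running\":2,\"starting\":0,\"startTaskFailed\":0,\"leavingPool\":0,\"unknown\":0," +
            "\"unusable\":0,\"waitingForStartTask\":0,\"deallocated\":3,\"deallocating\":1," +
            "\"total\":10,\"upgradingOS\":0}");

        BatchNodeCounts counts = ModelReaderWriter.Read<BatchNodeCounts>(json, new ModelReaderWriterOptions("J"));

        // The two new states sit next to the existing counters.
        Console.WriteLine($"deallocated: {counts.Deallocated}, deallocating: {counts.Deallocating}");
    }
}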
public int WaitingForStartTask { get; } + /// The number of Compute Nodes in the deallocated state. + public int Deallocated { get; } + /// The number of Compute Nodes in the deallocating state. + public int Deallocating { get; } /// The total number of Compute Nodes. public int Total { get; } /// The number of Compute Nodes in the upgradingOS state. diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateContent.Serialization.cs new file mode 100644 index 0000000000000..aedc552556b72 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateContent.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Compute.Batch +{ + public partial class BatchNodeDeallocateContent : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BatchNodeDeallocateContent)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(NodeDeallocateOption)) + { + writer.WritePropertyName("nodeDeallocateOption"u8); + writer.WriteStringValue(NodeDeallocateOption.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + BatchNodeDeallocateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BatchNodeDeallocateContent)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeBatchNodeDeallocateContent(document.RootElement, options); + } + + internal static BatchNodeDeallocateContent DeserializeBatchNodeDeallocateContent(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + BatchNodeDeallocateOption? 
nodeDeallocateOption = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("nodeDeallocateOption"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + nodeDeallocateOption = new BatchNodeDeallocateOption(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new BatchNodeDeallocateContent(nodeDeallocateOption, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(BatchNodeDeallocateContent)} does not support writing '{options.Format}' format."); + } + } + + BatchNodeDeallocateContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeBatchNodeDeallocateContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(BatchNodeDeallocateContent)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static BatchNodeDeallocateContent FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeBatchNodeDeallocateContent(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateContent.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateContent.cs new file mode 100644 index 0000000000000..c14a7b03f81e1 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateContent.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Compute.Batch +{ + /// Options for deallocating a Compute Node. + public partial class BatchNodeDeallocateContent + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public BatchNodeDeallocateContent() + { + } + + /// Initializes a new instance of . + /// When to deallocate the Compute Node and what to do with currently running Tasks. The default value is requeue. + /// Keeps track of any properties unknown to the library. + internal BatchNodeDeallocateContent(BatchNodeDeallocateOption? nodeDeallocateOption, IDictionary serializedAdditionalRawData) + { + NodeDeallocateOption = nodeDeallocateOption; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// When to deallocate the Compute Node and what to do with currently running Tasks. The default value is requeue. + public BatchNodeDeallocateOption? NodeDeallocateOption { get; set; } + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateOption.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateOption.cs new file mode 100644 index 0000000000000..b4c883a187e59 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDeallocateOption.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Compute.Batch +{ + /// BatchNodeDeallocateOption enums. + public readonly partial struct BatchNodeDeallocateOption : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public BatchNodeDeallocateOption(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string RequeueValue = "requeue"; + private const string TerminateValue = "terminate"; + private const string TaskCompletionValue = "taskcompletion"; + private const string RetainedDataValue = "retaineddata"; + + /// Terminate running Task processes and requeue the Tasks. The Tasks will run again when a Compute Node is available. Deallocate the Compute Node as soon as Tasks have been terminated. + public static BatchNodeDeallocateOption Requeue { get; } = new BatchNodeDeallocateOption(RequeueValue); + /// Terminate running Tasks. The Tasks will be completed with failureInfo indicating that they were terminated, and will not run again. Deallocate the Compute Node as soon as Tasks have been terminated. + public static BatchNodeDeallocateOption Terminate { get; } = new BatchNodeDeallocateOption(TerminateValue); + /// Allow currently running Tasks to complete. Schedule no new Tasks while waiting. Deallocate the Compute Node when all Tasks have completed. + public static BatchNodeDeallocateOption TaskCompletion { get; } = new BatchNodeDeallocateOption(TaskCompletionValue); + /// Allow currently running Tasks to complete, then wait for all Task data retention periods to expire. Schedule no new Tasks while waiting. Deallocate the Compute Node when all Task retention periods have expired. + public static BatchNodeDeallocateOption RetainedData { get; } = new BatchNodeDeallocateOption(RetainedDataValue); + /// Determines if two values are the same. + public static bool operator ==(BatchNodeDeallocateOption left, BatchNodeDeallocateOption right) => left.Equals(right); + /// Determines if two values are not the same. 
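The new BatchNodeDeallocateContent and BatchNodeDeallocateOption types describe when a Compute Node should be deallocated and what happens to its running Tasks; the Batch operation that consumes this body sits outside this hunk. As a sketch, the snippet below builds the request body and prints the JSON it serializes to.

using System;
using System.ClientModel.Primitives;
using Azure.Compute.Batch;

public static class DeallocateContentSample
{
    public static void Run()
    {
        // Let running Tasks finish before the node is deallocated; the service default is requeue.
        var content = new BatchNodeDeallocateContent
        {
            NodeDeallocateOption = BatchNodeDeallocateOption.TaskCompletion,
        };

        // Prints {"nodeDeallocateOption":"taskcompletion"}.
        BinaryData body = ModelReaderWriter.Write(content, new ModelReaderWriterOptions("J"));
        Console.WriteLine(body.ToString());
    }
}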
+ public static bool operator !=(BatchNodeDeallocateOption left, BatchNodeDeallocateOption right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator BatchNodeDeallocateOption(string value) => new BatchNodeDeallocateOption(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is BatchNodeDeallocateOption other && Equals(other); + /// + public bool Equals(BatchNodeDeallocateOption other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDisableSchedulingContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDisableSchedulingContent.Serialization.cs index ef8420b187398..64e887ab5c858 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDisableSchedulingContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeDisableSchedulingContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeDisableSchedulingContent : IUtf8JsonSerializable, void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, throw new FormatException($"The model {nameof(BatchNodeDisableSchedulingContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(NodeDisableSchedulingOption)) { writer.WritePropertyName("nodeDisableSchedulingOption"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, #endif } } - writer.WriteEndObject(); } BatchNodeDisableSchedulingContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeEndpointConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeEndpointConfiguration.Serialization.cs index 0afc6fc011764..904ea6996018b 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeEndpointConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeEndpointConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeEndpointConfiguration : IUtf8JsonSerializable, IJs void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mod throw new FormatException($"The model {nameof(BatchNodeEndpointConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("inboundEndpoints"u8); writer.WriteStartArray(); foreach (var item in InboundEndpoints) @@ -48,7 +56,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mod #endif } } - writer.WriteEndObject(); } BatchNodeEndpointConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeError.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeError.Serialization.cs index c3c8b9f8c8eba..1cde5e8c6a1f9 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeError.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeError.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeError : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp throw new FormatException($"The model {nameof(BatchNodeError)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Code)) { writer.WritePropertyName("code"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp #endif } } - writer.WriteEndObject(); } BatchNodeError IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeFile.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeFile.Serialization.cs index 16ea064af291e..386b4e8c70478 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeFile.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeFile.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeFile : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt throw new FormatException($"The model {nameof(BatchNodeFile)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Name)) { writer.WritePropertyName("name"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt #endif } } - writer.WriteEndObject(); } BatchNodeFile IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeIdentityReference.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeIdentityReference.Serialization.cs index 2a9d604213552..91a6e8d431d0d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeIdentityReference.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeIdentityReference.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeIdentityReference : IUtf8JsonSerializable, IJsonMo void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe throw new FormatException($"The model {nameof(BatchNodeIdentityReference)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(ResourceId)) { writer.WritePropertyName("resourceId"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe #endif } } - writer.WriteEndObject(); } BatchNodeIdentityReference IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeInfo.Serialization.cs index e15bde2933332..fe7b6c1b8fd18 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt throw new FormatException($"The model {nameof(BatchNodeInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(AffinityId)) { writer.WritePropertyName("affinityId"u8); @@ -71,7 +79,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt #endif } } - writer.WriteEndObject(); } BatchNodeInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodePlacementConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodePlacementConfiguration.Serialization.cs index cfb6cad983dd2..bf6ab9efda707 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodePlacementConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodePlacementConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodePlacementConfiguration : IUtf8JsonSerializable, IJ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mo throw new FormatException($"The model {nameof(BatchNodePlacementConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Policy)) { writer.WritePropertyName("policy"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mo #endif } } - writer.WriteEndObject(); } BatchNodePlacementConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRebootContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRebootContent.Serialization.cs index 73dc3dfeb3de4..6949d68e2842a 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRebootContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRebootContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeRebootContent : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchNodeRebootContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(NodeRebootOption)) { writer.WritePropertyName("nodeRebootOption"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchNodeRebootContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageContent.Serialization.cs new file mode 100644 index 0000000000000..66a50d339e1de --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageContent.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Compute.Batch +{ + public partial class BatchNodeReimageContent : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BatchNodeReimageContent)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(NodeReimageOption)) + { + writer.WritePropertyName("nodeReimageOption"u8); + writer.WriteStringValue(NodeReimageOption.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + BatchNodeReimageContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BatchNodeReimageContent)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeBatchNodeReimageContent(document.RootElement, options); + } + + internal static BatchNodeReimageContent DeserializeBatchNodeReimageContent(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + BatchNodeReimageOption? 
nodeReimageOption = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("nodeReimageOption"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + nodeReimageOption = new BatchNodeReimageOption(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new BatchNodeReimageContent(nodeReimageOption, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(BatchNodeReimageContent)} does not support writing '{options.Format}' format."); + } + } + + BatchNodeReimageContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeBatchNodeReimageContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(BatchNodeReimageContent)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static BatchNodeReimageContent FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeBatchNodeReimageContent(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageContent.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageContent.cs new file mode 100644 index 0000000000000..8222990b7344d --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageContent.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Compute.Batch +{ + /// Parameters for reimaging an Azure Batch Compute Node. + public partial class BatchNodeReimageContent + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public BatchNodeReimageContent() + { + } + + /// Initializes a new instance of . + /// When to reimage the Compute Node and what to do with currently running Tasks. The default value is requeue. + /// Keeps track of any properties unknown to the library. + internal BatchNodeReimageContent(BatchNodeReimageOption? nodeReimageOption, IDictionary serializedAdditionalRawData) + { + NodeReimageOption = nodeReimageOption; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// When to reimage the Compute Node and what to do with currently running Tasks. The default value is requeue. + public BatchNodeReimageOption? NodeReimageOption { get; set; } + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageOption.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageOption.cs new file mode 100644 index 0000000000000..1ceebbf462620 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeReimageOption.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Compute.Batch +{ + /// BatchNodeReimageOption enums. + public readonly partial struct BatchNodeReimageOption : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public BatchNodeReimageOption(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string RequeueValue = "requeue"; + private const string TerminateValue = "terminate"; + private const string TaskCompletionValue = "taskcompletion"; + private const string RetainedDataValue = "retaineddata"; + + /// Terminate running Task processes and requeue the Tasks. The Tasks will run again when a Compute Node is available. Reimage the Compute Node as soon as Tasks have been terminated. + public static BatchNodeReimageOption Requeue { get; } = new BatchNodeReimageOption(RequeueValue); + /// Terminate running Tasks. The Tasks will be completed with failureInfo indicating that they were terminated, and will not run again. Reimage the Compute Node as soon as Tasks have been terminated. + public static BatchNodeReimageOption Terminate { get; } = new BatchNodeReimageOption(TerminateValue); + /// Allow currently running Tasks to complete. Schedule no new Tasks while waiting. Reimage the Compute Node when all Tasks have completed. + public static BatchNodeReimageOption TaskCompletion { get; } = new BatchNodeReimageOption(TaskCompletionValue); + /// Allow currently running Tasks to complete, then wait for all Task data retention periods to expire. Schedule no new Tasks while waiting. Reimage the Compute Node when all Task retention periods have expired. + public static BatchNodeReimageOption RetainedData { get; } = new BatchNodeReimageOption(RetainedDataValue); + /// Determines if two values are the same. + public static bool operator ==(BatchNodeReimageOption left, BatchNodeReimageOption right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(BatchNodeReimageOption left, BatchNodeReimageOption right) => !left.Equals(right); + /// Converts a to a . 
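As an illustration of the new reimage surface above (a sketch, not part of the generated code), the request body can be built from the members this diff introduces and round-tripped through ModelReaderWriter to see the wire JSON; the "J" format string is the JSON format these IJsonModel implementations accept:

using System;
using System.ClientModel.Primitives;
using Azure.Compute.Batch;

// Let currently running Tasks finish before the node is reimaged.
var content = new BatchNodeReimageContent
{
    NodeReimageOption = BatchNodeReimageOption.TaskCompletion
};

// BatchNodeReimageContent implements IPersistableModel, so ModelReaderWriter can serialize it.
BinaryData body = ModelReaderWriter.Write(content, new ModelReaderWriterOptions("J"));
Console.WriteLine(body.ToString());   // {"nodeReimageOption":"taskcompletion"}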
+ public static implicit operator BatchNodeReimageOption(string value) => new BatchNodeReimageOption(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is BatchNodeReimageOption other && Equals(other); + /// + public bool Equals(BatchNodeReimageOption other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRemoteLoginSettings.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRemoteLoginSettings.Serialization.cs index f9aec5b2571b6..64a97137a0980 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRemoteLoginSettings.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRemoteLoginSettings.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeRemoteLoginSettings : IUtf8JsonSerializable, IJson void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model throw new FormatException($"The model {nameof(BatchNodeRemoteLoginSettings)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("remoteLoginIPAddress"u8); writer.WriteStringValue(RemoteLoginIpAddress); writer.WritePropertyName("remoteLoginPort"u8); @@ -45,7 +53,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model #endif } } - writer.WriteEndObject(); } BatchNodeRemoteLoginSettings IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRemoveContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRemoveContent.Serialization.cs index 3828c539a2702..4a2c7cd95efda 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRemoveContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeRemoveContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeRemoveContent : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchNodeRemoveContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("nodeList"u8); writer.WriteStartArray(); foreach (var item in NodeList) @@ -58,7 +66,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchNodeRemoveContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeState.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeState.cs index b986f5ec1350e..71bfaaba250f7 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeState.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeState.cs @@ -36,6 +36,8 @@ public BatchNodeState(string value) private const string OfflineValue = "offline"; private const string PreemptedValue = "preempted"; private const string UpgradingOSValue = "upgradingos"; + private const string DeallocatedValue = "deallocated"; + private const string DeallocatingValue = "deallocating"; /// The Compute Node is not currently running a Task. public static BatchNodeState Idle { get; } = new BatchNodeState(IdleValue); @@ -65,6 +67,10 @@ public BatchNodeState(string value) public static BatchNodeState Preempted { get; } = new BatchNodeState(PreemptedValue); /// The Compute Node is undergoing an OS upgrade operation. public static BatchNodeState UpgradingOS { get; } = new BatchNodeState(UpgradingOSValue); + /// The Compute Node is deallocated. + public static BatchNodeState Deallocated { get; } = new BatchNodeState(DeallocatedValue); + /// The Compute Node is deallocating. + public static BatchNodeState Deallocating { get; } = new BatchNodeState(DeallocatingValue); /// Determines if two values are the same. public static bool operator ==(BatchNodeState left, BatchNodeState right) => left.Equals(right); /// Determines if two values are not the same. diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserCreateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserCreateContent.Serialization.cs index d48002248dba9..998f55e3dee53 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserCreateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserCreateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeUserCreateContent : IUtf8JsonSerializable, IJsonMo void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
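A quick sketch (not part of the generated code) of how the two new node states behave: like the existing values they are extensible-enum wrappers over strings, so they can be constructed from the wire value and compared against the named members added here.

using Azure.Compute.Batch;

// Constructed from the wire string, then compared against the new named values.
BatchNodeState reported = new BatchNodeState("deallocated");
bool stopped = reported == BatchNodeState.Deallocated;                        // true
bool stoppingOrStopped = stopped || reported == BatchNodeState.Deallocating;  // true
System.Console.WriteLine(stoppingOrStopped);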
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe throw new FormatException($"The model {nameof(BatchNodeUserCreateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("name"u8); writer.WriteStringValue(Name); if (Optional.IsDefined(IsAdmin)) @@ -63,7 +71,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe #endif } } - writer.WriteEndObject(); } BatchNodeUserCreateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserCreateContent.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserCreateContent.cs index 0f9fbcc2b1583..e3d6007ed5fd1 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserCreateContent.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserCreateContent.cs @@ -59,7 +59,7 @@ public BatchNodeUserCreateContent(string name) /// The user name of the Account. /// Whether the Account should be an administrator on the Compute Node. The default value is false. /// The time at which the Account should expire. If omitted, the default is 1 day from the current time. For Linux Compute Nodes, the expiryTime has a precision up to a day. - /// The password of the Account. The password is required for Windows Compute Nodes (those created with 'virtualMachineConfiguration' using a Windows Image reference). For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. + /// The password of the Account. The password is required for Windows Compute Nodes. For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. /// The SSH public key that can be used for remote login to the Compute Node. The public key should be compatible with OpenSSH encoding and should be base 64 encoded. This property can be specified only for Linux Compute Nodes. If this is specified for a Windows Compute Node, then the Batch service rejects the request; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). /// Keeps track of any properties unknown to the library. internal BatchNodeUserCreateContent(string name, bool? isAdmin, DateTimeOffset? expiryTime, string password, string sshPublicKey, IDictionary serializedAdditionalRawData) @@ -83,7 +83,7 @@ internal BatchNodeUserCreateContent() public bool? IsAdmin { get; set; } /// The time at which the Account should expire. If omitted, the default is 1 day from the current time. For Linux Compute Nodes, the expiryTime has a precision up to a day. public DateTimeOffset? ExpiryTime { get; set; } - /// The password of the Account. The password is required for Windows Compute Nodes (those created with 'virtualMachineConfiguration' using a Windows Image reference). For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. + /// The password of the Account. The password is required for Windows Compute Nodes. For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. public string Password { get; set; } /// The SSH public key that can be used for remote login to the Compute Node. The public key should be compatible with OpenSSH encoding and should be base 64 encoded. This property can be specified only for Linux Compute Nodes. 
If this is specified for a Windows Compute Node, then the Batch service rejects the request; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). public string SshPublicKey { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserUpdateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserUpdateContent.Serialization.cs index e0bf13777869d..51eaf5a0ddf30 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserUpdateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserUpdateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeUserUpdateContent : IUtf8JsonSerializable, IJsonMo void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe throw new FormatException($"The model {nameof(BatchNodeUserUpdateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Password)) { writer.WritePropertyName("password"u8); @@ -56,7 +64,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe #endif } } - writer.WriteEndObject(); } BatchNodeUserUpdateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserUpdateContent.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserUpdateContent.cs index 8ebac642c3d5c..9741f08b62085 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserUpdateContent.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeUserUpdateContent.cs @@ -51,7 +51,7 @@ public BatchNodeUserUpdateContent() } /// Initializes a new instance of . - /// The password of the Account. The password is required for Windows Compute Nodes (those created with 'virtualMachineConfiguration' using a Windows Image reference). For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. If omitted, any existing password is removed. + /// The password of the Account. The password is required for Windows Compute Nodes. For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. If omitted, any existing password is removed. /// The time at which the Account should expire. If omitted, the default is 1 day from the current time. For Linux Compute Nodes, the expiryTime has a precision up to a day. /// The SSH public key that can be used for remote login to the Compute Node. The public key should be compatible with OpenSSH encoding and should be base 64 encoded. This property can be specified only for Linux Compute Nodes. If this is specified for a Windows Compute Node, then the Batch service rejects the request; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). If omitted, any existing SSH public key is removed. 
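To make the password/SSH-key rules in the doc comments above concrete, a short sketch (not part of the generated code) using only the constructor and settable properties shown in this diff; the key and password strings are placeholders:

using System;
using Azure.Compute.Batch;

// Linux Compute Node: no password, SSH public key instead (base64-encoded OpenSSH key).
var linuxUser = new BatchNodeUserCreateContent("build-agent")
{
    IsAdmin = false,
    ExpiryTime = DateTimeOffset.UtcNow.AddDays(1),        // matches the documented default of 1 day
    SshPublicKey = "<base64-encoded OpenSSH public key>"  // placeholder
};

// Windows Compute Node: a password is required; setting SshPublicKey would be rejected with 400.
var windowsUser = new BatchNodeUserCreateContent("build-agent")
{
    IsAdmin = true,
    Password = "<strong password>"                        // placeholder
};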
/// Keeps track of any properties unknown to the library. @@ -63,7 +63,7 @@ internal BatchNodeUserUpdateContent(string password, DateTimeOffset? expiryTime, _serializedAdditionalRawData = serializedAdditionalRawData; } - /// The password of the Account. The password is required for Windows Compute Nodes (those created with 'virtualMachineConfiguration' using a Windows Image reference). For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. If omitted, any existing password is removed. + /// The password of the Account. The password is required for Windows Compute Nodes. For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. If omitted, any existing password is removed. public string Password { get; set; } /// The time at which the Account should expire. If omitted, the default is 1 day from the current time. For Linux Compute Nodes, the expiryTime has a precision up to a day. public DateTimeOffset? ExpiryTime { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeVMExtension.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeVMExtension.Serialization.cs index fdf316f424931..000160cc4f070 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeVMExtension.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchNodeVMExtension.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchNodeVMExtension : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr throw new FormatException($"The model {nameof(BatchNodeVMExtension)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(ProvisioningState)) { writer.WritePropertyName("provisioningState"u8); @@ -56,7 +64,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr #endif } } - writer.WriteEndObject(); } BatchNodeVMExtension IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPool.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPool.Serialization.cs index c006448060f22..197e6df6e27ac 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPool.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPool.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPool : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions throw new FormatException($"The model {nameof(BatchPool)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (options.Format != "W" && Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -257,7 +265,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions #endif } } - writer.WriteEndObject(); } BatchPool IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPool.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPool.cs index 4cbd3bee14783..0f1a83eac5d7b 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPool.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPool.cs @@ -67,7 +67,7 @@ internal BatchPool() /// The time at which the Pool entered its current state. /// Whether the Pool is resizing. /// The time at which the Pool entered its current allocation state. - /// The size of virtual machines in the Pool. All virtual machines in a Pool are the same size. For information about available sizes of virtual machines in Pools, see Choose a VM size for Compute Nodes in an Azure Batch Pool (https://docs.microsoft.com/azure/batch/batch-pool-vm-sizes). + /// The size of virtual machines in the Pool. All virtual machines in a Pool are the same size. For information about available VM sizes, see Sizes for Virtual Machines (Linux) (https://azure.microsoft.com/documentation/articles/virtual-machines-linux-sizes/) or Sizes for Virtual Machines (Windows) (https://azure.microsoft.com/documentation/articles/virtual-machines-windows-sizes/). Batch supports all Azure VM sizes except STANDARD_A0 and those with premium storage (STANDARD_GS, STANDARD_DS, and STANDARD_DSV2 series). /// The virtual machine configuration for the Pool. This property must be specified. /// The timeout for allocation of Compute Nodes to the Pool. This is the timeout for the most recent resize operation. (The initial sizing when the Pool is created counts as a resize.) The default value is 15 minutes. /// A list of errors encountered while performing the last resize on the Pool. This property is set only if one or more errors occurred during the last Pool resize, and only when the Pool allocationState is Steady. @@ -88,7 +88,7 @@ internal BatchPool() /// How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread. /// The list of user Accounts to be created on each Compute Node in the Pool. /// A list of name-value pairs associated with the Pool as metadata. - /// Utilization and resource usage statistics for the entire lifetime of the Pool. This property is populated only if the CloudPool was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. + /// Utilization and resource usage statistics for the entire lifetime of the Pool. This property is populated only if the BatchPool was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. 
The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. /// A list of file systems to mount on each node in the pool. This supports Azure Files, NFS, CIFS/SMB, and Blobfuse. /// The identity of the Batch pool, if configured. The list of user identities associated with the Batch pool. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. /// The desired node communication mode for the pool. If omitted, the default value is Default. @@ -157,7 +157,7 @@ internal BatchPool(string id, string displayName, string url, string eTag, DateT public AllocationState? AllocationState { get; } /// The time at which the Pool entered its current allocation state. public DateTimeOffset? AllocationStateTransitionTime { get; } - /// The size of virtual machines in the Pool. All virtual machines in a Pool are the same size. For information about available sizes of virtual machines in Pools, see Choose a VM size for Compute Nodes in an Azure Batch Pool (https://docs.microsoft.com/azure/batch/batch-pool-vm-sizes). + /// The size of virtual machines in the Pool. All virtual machines in a Pool are the same size. For information about available VM sizes, see Sizes for Virtual Machines (Linux) (https://azure.microsoft.com/documentation/articles/virtual-machines-linux-sizes/) or Sizes for Virtual Machines (Windows) (https://azure.microsoft.com/documentation/articles/virtual-machines-windows-sizes/). Batch supports all Azure VM sizes except STANDARD_A0 and those with premium storage (STANDARD_GS, STANDARD_DS, and STANDARD_DSV2 series). public string VmSize { get; } /// The virtual machine configuration for the Pool. This property must be specified. public VirtualMachineConfiguration VirtualMachineConfiguration { get; } @@ -199,7 +199,7 @@ internal BatchPool(string id, string displayName, string url, string eTag, DateT public IReadOnlyList UserAccounts { get; } /// A list of name-value pairs associated with the Pool as metadata. public IReadOnlyList Metadata { get; } - /// Utilization and resource usage statistics for the entire lifetime of the Pool. This property is populated only if the CloudPool was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. + /// Utilization and resource usage statistics for the entire lifetime of the Pool. This property is populated only if the BatchPool was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. public BatchPoolStatistics Stats { get; } /// A list of file systems to mount on each node in the pool. This supports Azure Files, NFS, CIFS/SMB, and Blobfuse. 
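The same refactor repeats across these serialization files: IJsonModel.Write now only writes the start/end object envelope and delegates the property payload to a new protected virtual JsonModelWriteCore. A sketch (not part of the generated code) of what that enables, using a hypothetical derived type; BatchNodeReimageContent is used only because it has a public constructor:

using System.ClientModel.Primitives;
using System.Text.Json;
using Azure.Compute.Batch;

// Hypothetical derived type, for illustration only.
public class TaggedReimageContent : BatchNodeReimageContent
{
    public string Tag { get; set; }

    protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options)
    {
        // The base class writes its properties; the surrounding WriteStartObject/WriteEndObject
        // stays in the interface Write method, so derived types cannot unbalance the envelope.
        base.JsonModelWriteCore(writer, options);
        if (Tag != null)
        {
            writer.WriteString("tag", Tag);
        }
    }
}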
public IReadOnlyList MountConfiguration { get; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolCreateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolCreateContent.Serialization.cs index 6b99fdb0fe630..ca3d2f6bdf9b5 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolCreateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolCreateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolCreateContent : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchPoolCreateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("id"u8); writer.WriteStringValue(Id); if (Optional.IsDefined(DisplayName)) @@ -171,7 +179,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchPoolCreateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEnableAutoScaleContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEnableAutoScaleContent.Serialization.cs index f4616dd380466..acaa6bb2ab978 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEnableAutoScaleContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEnableAutoScaleContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolEnableAutoScaleContent : IUtf8JsonSerializable, IJ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mo throw new FormatException($"The model {nameof(BatchPoolEnableAutoScaleContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(AutoScaleFormula)) { writer.WritePropertyName("autoScaleFormula"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mo #endif } } - writer.WriteEndObject(); } BatchPoolEnableAutoScaleContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEnableAutoScaleContent.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEnableAutoScaleContent.cs index 13446bd47d3c2..b3b15b6404d60 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEnableAutoScaleContent.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEnableAutoScaleContent.cs @@ -51,7 +51,7 @@ public BatchPoolEnableAutoScaleContent() } /// Initializes a new instance of . - /// The formula for the desired number of Compute Nodes in the Pool. The formula is checked for validity before it is applied to the Pool. If the formula is not valid, the Batch service rejects the request with detailed error information. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/en-us/documentation/articles/batch-automatic-scaling). + /// The formula for the desired number of Compute Nodes in the Pool. The formula is checked for validity before it is applied to the Pool. If the formula is not valid, the Batch service rejects the request with detailed error information. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/documentation/articles/batch-automatic-scaling). /// The time interval at which to automatically adjust the Pool size according to the autoscale formula. The default value is 15 minutes. The minimum and maximum value are 5 minutes and 168 hours respectively. If you specify a value less than 5 minutes or greater than 168 hours, the Batch service rejects the request with an invalid property value error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). If you specify a new interval, then the existing autoscale evaluation schedule will be stopped and a new autoscale evaluation schedule will be started, with its starting time being the time when this request was issued. /// Keeps track of any properties unknown to the library. internal BatchPoolEnableAutoScaleContent(string autoScaleFormula, TimeSpan? autoScaleEvaluationInterval, IDictionary serializedAdditionalRawData) @@ -61,7 +61,7 @@ internal BatchPoolEnableAutoScaleContent(string autoScaleFormula, TimeSpan? auto _serializedAdditionalRawData = serializedAdditionalRawData; } - /// The formula for the desired number of Compute Nodes in the Pool. The formula is checked for validity before it is applied to the Pool. If the formula is not valid, the Batch service rejects the request with detailed error information.
For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/en-us/documentation/articles/batch-automatic-scaling). + /// The formula for the desired number of Compute Nodes in the Pool. The formula is checked for validity before it is applied to the Pool. If the formula is not valid, the Batch service rejects the request with detailed error information. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/documentation/articles/batch-automatic-scaling). public string AutoScaleFormula { get; set; } /// The time interval at which to automatically adjust the Pool size according to the autoscale formula. The default value is 15 minutes. The minimum and maximum value are 5 minutes and 168 hours respectively. If you specify a value less than 5 minutes or greater than 168 hours, the Batch service rejects the request with an invalid property value error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). If you specify a new interval, then the existing autoscale evaluation schedule will be stopped and a new autoscale evaluation schedule will be started, with its starting time being the time when this request was issued. public TimeSpan? AutoScaleEvaluationInterval { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEndpointConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEndpointConfiguration.Serialization.cs index ad9474bedb9e9..4dd5cbbff1031 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEndpointConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEndpointConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolEndpointConfiguration : IUtf8JsonSerializable, IJs void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ?
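A sketch (not part of the generated code) of the enable-autoscale body these doc comments describe; the formula text is only an example, and the evaluation interval is chosen inside the documented 5-minute to 168-hour window:

using System;
using Azure.Compute.Batch;

var enableAutoScale = new BatchPoolEnableAutoScaleContent
{
    // Example formula only; any valid Batch autoscale formula string works here.
    AutoScaleFormula = "$TargetDedicatedNodes = 2;",
    // Default is 15 minutes; values outside 5 minutes to 168 hours are rejected with 400.
    AutoScaleEvaluationInterval = TimeSpan.FromMinutes(15)
};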
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mod throw new FormatException($"The model {nameof(BatchPoolEndpointConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("inboundNATPools"u8); writer.WriteStartArray(); foreach (var item in InboundNatPools) @@ -48,7 +56,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mod #endif } } - writer.WriteEndObject(); } BatchPoolEndpointConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEvaluateAutoScaleContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEvaluateAutoScaleContent.Serialization.cs index 7f9ec19c737ee..07dbb62ecf701 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEvaluateAutoScaleContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEvaluateAutoScaleContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolEvaluateAutoScaleContent : IUtf8JsonSerializable, void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, throw new FormatException($"The model {nameof(BatchPoolEvaluateAutoScaleContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("autoScaleFormula"u8); writer.WriteStringValue(AutoScaleFormula); if (options.Format != "W" && _serializedAdditionalRawData != null) @@ -43,7 +51,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, #endif } } - writer.WriteEndObject(); } BatchPoolEvaluateAutoScaleContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEvaluateAutoScaleContent.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEvaluateAutoScaleContent.cs index b89f8e7df1cdc..3e2d717a256ec 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEvaluateAutoScaleContent.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolEvaluateAutoScaleContent.cs @@ -46,7 +46,7 @@ public partial class BatchPoolEvaluateAutoScaleContent private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The formula for the desired number of Compute Nodes in the Pool. The formula is validated and its results calculated, but it is not applied to the Pool. To apply the formula to the Pool, 'Enable automatic scaling on a Pool'. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/en-us/documentation/articles/batch-automatic-scaling). + /// The formula for the desired number of Compute Nodes in the Pool. 
The formula is validated and its results calculated, but it is not applied to the Pool. To apply the formula to the Pool, 'Enable automatic scaling on a Pool'. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/documentation/articles/batch-automatic-scaling). /// is null. public BatchPoolEvaluateAutoScaleContent(string autoScaleFormula) { @@ -56,7 +56,7 @@ public BatchPoolEvaluateAutoScaleContent(string autoScaleFormula) } /// Initializes a new instance of . - /// The formula for the desired number of Compute Nodes in the Pool. The formula is validated and its results calculated, but it is not applied to the Pool. To apply the formula to the Pool, 'Enable automatic scaling on a Pool'. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/en-us/documentation/articles/batch-automatic-scaling). + /// The formula for the desired number of Compute Nodes in the Pool. The formula is validated and its results calculated, but it is not applied to the Pool. To apply the formula to the Pool, 'Enable automatic scaling on a Pool'. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/documentation/articles/batch-automatic-scaling). /// Keeps track of any properties unknown to the library. internal BatchPoolEvaluateAutoScaleContent(string autoScaleFormula, IDictionary serializedAdditionalRawData) { @@ -69,7 +69,7 @@ internal BatchPoolEvaluateAutoScaleContent() { } - /// The formula for the desired number of Compute Nodes in the Pool. The formula is validated and its results calculated, but it is not applied to the Pool. To apply the formula to the Pool, 'Enable automatic scaling on a Pool'. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/en-us/documentation/articles/batch-automatic-scaling). + /// The formula for the desired number of Compute Nodes in the Pool. The formula is validated and its results calculated, but it is not applied to the Pool. To apply the formula to the Pool, 'Enable automatic scaling on a Pool'. For more information about specifying this formula, see Automatically scale Compute Nodes in an Azure Batch Pool (https://azure.microsoft.com/documentation/articles/batch-automatic-scaling). public string AutoScaleFormula { get; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolIdentity.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolIdentity.Serialization.cs index 67f75f8105ea1..863bea8f55649 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolIdentity.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolIdentity.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolIdentity : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrite throw new FormatException($"The model {nameof(BatchPoolIdentity)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("type"u8); writer.WriteStringValue(Type.ToString()); if (Optional.IsCollectionDefined(UserAssignedIdentities)) @@ -53,7 +61,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrite #endif } } - writer.WriteEndObject(); } BatchPoolIdentity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolInfo.Serialization.cs index dd367f46c8d33..7a8418eb7712b 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt throw new FormatException($"The model {nameof(BatchPoolInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(PoolId)) { writer.WritePropertyName("poolId"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt #endif } } - writer.WriteEndObject(); } BatchPoolInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolNodeCounts.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolNodeCounts.Serialization.cs index cb3ec6b4e8072..fe375e92dd7b4 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolNodeCounts.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolNodeCounts.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolNodeCounts : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(BatchPoolNodeCounts)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("poolId"u8); writer.WriteStringValue(PoolId); if (Optional.IsDefined(Dedicated)) @@ -53,7 +61,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri #endif } } - writer.WriteEndObject(); } BatchPoolNodeCounts IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolReplaceContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolReplaceContent.Serialization.cs index c0746e2dcb459..b06410aa43089 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolReplaceContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolReplaceContent.Serialization.cs @@ -14,6 +14,57 @@ namespace Azure.Compute.Batch { public partial class BatchPoolReplaceContent : IUtf8JsonSerializable, IJsonModel { + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BatchPoolReplaceContent)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(StartTask)) + { + writer.WritePropertyName("startTask"u8); + writer.WriteObjectValue(StartTask, options); + } + writer.WritePropertyName("applicationPackageReferences"u8); + writer.WriteStartArray(); + foreach (var item in ApplicationPackageReferences) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsDefined(TargetNodeCommunicationMode)) + { + writer.WritePropertyName("targetNodeCommunicationMode"u8); + writer.WriteStringValue(TargetNodeCommunicationMode.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + BatchPoolReplaceContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolResizeContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolResizeContent.Serialization.cs index 2a68d6fcb75a0..12f7a1346d13a 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolResizeContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolResizeContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolResizeContent : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchPoolResizeContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(TargetDedicatedNodes)) { writer.WritePropertyName("targetDedicatedNodes"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchPoolResizeContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolResourceStatistics.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolResourceStatistics.Serialization.cs index 061b3ac2b785e..e6cf1fc49a5ac 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolResourceStatistics.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolResourceStatistics.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolResourceStatistics : IUtf8JsonSerializable, IJsonM void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR throw new FormatException($"The model {nameof(BatchPoolResourceStatistics)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("startTime"u8); writer.WriteStringValue(StartTime, "O"); writer.WritePropertyName("lastUpdateTime"u8); @@ -67,7 +75,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR #endif } } - writer.WriteEndObject(); } BatchPoolResourceStatistics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolSpecification.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolSpecification.Serialization.cs index 293379cd7d853..c39d46d8a9e3d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolSpecification.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolSpecification.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolSpecification : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchPoolSpecification)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(DisplayName)) { writer.WritePropertyName("displayName"u8); @@ -163,7 +171,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchPoolSpecification IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolSpecification.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolSpecification.cs index 758fe6016c02a..de3caaf284f23 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolSpecification.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolSpecification.cs @@ -62,7 +62,7 @@ public BatchPoolSpecification(string vmSize) /// Initializes a new instance of . /// The display name for the Pool. The display name need not be unique and can contain any Unicode characters up to a maximum length of 1024. /// The size of the virtual machines in the Pool. All virtual machines in a Pool are the same size. For information about available sizes of virtual machines in Pools, see Choose a VM size for Compute Nodes in an Azure Batch Pool (https://docs.microsoft.com/azure/batch/batch-pool-vm-sizes). - /// The virtual machine configuration for the Pool. This property must be specified if the Pool needs to be created with Azure IaaS VMs. If it is not specified then the Batch service returns an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). + /// The virtual machine configuration for the Pool. 
This property must be specified. /// The number of task slots that can be used to run concurrent tasks on a single compute node in the pool. The default value is 1. The maximum value is the smaller of 4 times the number of cores of the vmSize of the pool or 256. /// How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread. /// The timeout for allocation of Compute Nodes to the Pool. This timeout applies only to manual scaling; it has no effect when enableAutoScale is set to true. The default value is 15 minutes. The minimum value is 5 minutes. If you specify a value less than 5 minutes, the Batch service rejects the request with an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). @@ -117,7 +117,7 @@ internal BatchPoolSpecification() public string DisplayName { get; set; } /// The size of the virtual machines in the Pool. All virtual machines in a Pool are the same size. For information about available sizes of virtual machines in Pools, see Choose a VM size for Compute Nodes in an Azure Batch Pool (https://docs.microsoft.com/azure/batch/batch-pool-vm-sizes). public string VmSize { get; set; } - /// The virtual machine configuration for the Pool. This property must be specified if the Pool needs to be created with Azure IaaS VMs. If it is not specified then the Batch service returns an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). + /// The virtual machine configuration for the Pool. This property must be specified. public VirtualMachineConfiguration VirtualMachineConfiguration { get; set; } /// The number of task slots that can be used to run concurrent tasks on a single compute node in the pool. The default value is 1. The maximum value is the smaller of 4 times the number of cores of the vmSize of the pool or 256. public int? TaskSlotsPerNode { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolStatistics.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolStatistics.Serialization.cs index 8f3aaf2e74929..40bdfc87e3bcf 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolStatistics.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolStatistics.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolStatistics : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(BatchPoolStatistics)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("url"u8); writer.WriteStringValue(Url); writer.WritePropertyName("startTime"u8); @@ -57,7 +65,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri #endif } } - writer.WriteEndObject(); } BatchPoolStatistics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUpdateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUpdateContent.Serialization.cs index 1bcfc367c703b..566169b705d09 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUpdateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUpdateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolUpdateContent : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,21 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchPoolUpdateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); + if (Optional.IsDefined(DisplayName)) + { + writer.WritePropertyName("displayName"u8); + writer.WriteStringValue(DisplayName); + } + if (Optional.IsDefined(VmSize)) + { + writer.WritePropertyName("vmSize"u8); + writer.WriteStringValue(VmSize); + } + if (Optional.IsDefined(EnableInterNodeCommunication)) + { + writer.WritePropertyName("enableInterNodeCommunication"u8); + writer.WriteBooleanValue(EnableInterNodeCommunication.Value); + } if (Optional.IsDefined(StartTask)) { writer.WritePropertyName("startTask"u8); @@ -51,11 +74,67 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader } writer.WriteEndArray(); } + if (Optional.IsDefined(VirtualMachineConfiguration)) + { + writer.WritePropertyName("virtualMachineConfiguration"u8); + writer.WriteObjectValue(VirtualMachineConfiguration, options); + } if (Optional.IsDefined(TargetNodeCommunicationMode)) { writer.WritePropertyName("targetNodeCommunicationMode"u8); writer.WriteStringValue(TargetNodeCommunicationMode.Value.ToString()); } + if (Optional.IsDefined(TaskSlotsPerNode)) + { + writer.WritePropertyName("taskSlotsPerNode"u8); + writer.WriteNumberValue(TaskSlotsPerNode.Value); + } + if (Optional.IsDefined(TaskSchedulingPolicy)) + { + writer.WritePropertyName("taskSchedulingPolicy"u8); + writer.WriteObjectValue(TaskSchedulingPolicy, options); + } + if (Optional.IsDefined(NetworkConfiguration)) + { + writer.WritePropertyName("networkConfiguration"u8); + writer.WriteObjectValue(NetworkConfiguration, options); + } + if (Optional.IsCollectionDefined(ResourceTags)) + { + 
writer.WritePropertyName("resourceTags"u8); + writer.WriteStartObject(); + foreach (var item in ResourceTags) + { + writer.WritePropertyName(item.Key); + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + if (Optional.IsCollectionDefined(UserAccounts)) + { + writer.WritePropertyName("userAccounts"u8); + writer.WriteStartArray(); + foreach (var item in UserAccounts) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(MountConfiguration)) + { + writer.WritePropertyName("mountConfiguration"u8); + writer.WriteStartArray(); + foreach (var item in MountConfiguration) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(UpgradePolicy)) + { + writer.WritePropertyName("upgradePolicy"u8); + writer.WriteObjectValue(UpgradePolicy, options); + } if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -71,7 +150,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchPoolUpdateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) @@ -94,14 +172,44 @@ internal static BatchPoolUpdateContent DeserializeBatchPoolUpdateContent(JsonEle { return null; } + string displayName = default; + string vmSize = default; + bool? enableInterNodeCommunication = default; BatchStartTask startTask = default; IList applicationPackageReferences = default; IList metadata = default; + VirtualMachineConfiguration virtualMachineConfiguration = default; BatchNodeCommunicationMode? targetNodeCommunicationMode = default; + int? taskSlotsPerNode = default; + BatchTaskSchedulingPolicy taskSchedulingPolicy = default; + NetworkConfiguration networkConfiguration = default; + IDictionary resourceTags = default; + IList userAccounts = default; + IList mountConfiguration = default; + UpgradePolicy upgradePolicy = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) { + if (property.NameEquals("displayName"u8)) + { + displayName = property.Value.GetString(); + continue; + } + if (property.NameEquals("vmSize"u8)) + { + vmSize = property.Value.GetString(); + continue; + } + if (property.NameEquals("enableInterNodeCommunication"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableInterNodeCommunication = property.Value.GetBoolean(); + continue; + } if (property.NameEquals("startTask"u8)) { if (property.Value.ValueKind == JsonValueKind.Null) @@ -139,6 +247,15 @@ internal static BatchPoolUpdateContent DeserializeBatchPoolUpdateContent(JsonEle metadata = array; continue; } + if (property.NameEquals("virtualMachineConfiguration"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + virtualMachineConfiguration = VirtualMachineConfiguration.DeserializeVirtualMachineConfiguration(property.Value, options); + continue; + } if (property.NameEquals("targetNodeCommunicationMode"u8)) { if (property.Value.ValueKind == JsonValueKind.Null) @@ -148,13 +265,107 @@ internal static BatchPoolUpdateContent DeserializeBatchPoolUpdateContent(JsonEle targetNodeCommunicationMode = new BatchNodeCommunicationMode(property.Value.GetString()); continue; } + if (property.NameEquals("taskSlotsPerNode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + taskSlotsPerNode = 
property.Value.GetInt32(); + continue; + } + if (property.NameEquals("taskSchedulingPolicy"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + taskSchedulingPolicy = BatchTaskSchedulingPolicy.DeserializeBatchTaskSchedulingPolicy(property.Value, options); + continue; + } + if (property.NameEquals("networkConfiguration"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + networkConfiguration = NetworkConfiguration.DeserializeNetworkConfiguration(property.Value, options); + continue; + } + if (property.NameEquals("resourceTags"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, property0.Value.GetString()); + } + resourceTags = dictionary; + continue; + } + if (property.NameEquals("userAccounts"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(UserAccount.DeserializeUserAccount(item, options)); + } + userAccounts = array; + continue; + } + if (property.NameEquals("mountConfiguration"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Batch.MountConfiguration.DeserializeMountConfiguration(item, options)); + } + mountConfiguration = array; + continue; + } + if (property.NameEquals("upgradePolicy"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + upgradePolicy = UpgradePolicy.DeserializeUpgradePolicy(property.Value, options); + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } } serializedAdditionalRawData = rawDataDictionary; - return new BatchPoolUpdateContent(startTask, applicationPackageReferences ?? new ChangeTrackingList(), metadata ?? new ChangeTrackingList(), targetNodeCommunicationMode, serializedAdditionalRawData); + return new BatchPoolUpdateContent( + displayName, + vmSize, + enableInterNodeCommunication, + startTask, + applicationPackageReferences ?? new ChangeTrackingList(), + metadata ?? new ChangeTrackingList(), + virtualMachineConfiguration, + targetNodeCommunicationMode, + taskSlotsPerNode, + taskSchedulingPolicy, + networkConfiguration, + resourceTags ?? new ChangeTrackingDictionary(), + userAccounts ?? new ChangeTrackingList(), + mountConfiguration ?? new ChangeTrackingList(), + upgradePolicy, + serializedAdditionalRawData); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUpdateContent.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUpdateContent.cs index 1300eb052c5c8..e6f6da6ec2dd1 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUpdateContent.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUpdateContent.cs @@ -50,30 +50,77 @@ public BatchPoolUpdateContent() { ApplicationPackageReferences = new ChangeTrackingList(); Metadata = new ChangeTrackingList(); + ResourceTags = new ChangeTrackingDictionary(); + UserAccounts = new ChangeTrackingList(); + MountConfiguration = new ChangeTrackingList(); } /// Initializes a new instance of . + /// The display name for the Pool. 
The display name need not be unique and can contain any Unicode characters up to a maximum length of 1024. This field can be updated only when the pool is empty. + /// The size of virtual machines in the Pool. For information about available sizes of virtual machines in Pools, see Choose a VM size for Compute Nodes in an Azure Batch Pool (https://docs.microsoft.com/azure/batch/batch-pool-vm-sizes).<br /><br />This field can be updated only when the pool is empty. + /// Whether the Pool permits direct communication between Compute Nodes. Enabling inter-node communication limits the maximum size of the Pool due to deployment restrictions on the Compute Nodes of the Pool. This may result in the Pool not reaching its desired size. The default value is false.<br /><br />This field can be updated only when the pool is empty. /// A Task to run on each Compute Node as it joins the Pool. The Task runs when the Compute Node is added to the Pool or when the Compute Node is restarted. If this element is present, it overwrites any existing StartTask. If omitted, any existing StartTask is left unchanged. /// A list of Packages to be installed on each Compute Node in the Pool. Changes to Package references affect all new Nodes joining the Pool, but do not affect Compute Nodes that are already in the Pool until they are rebooted or reimaged. If this element is present, it replaces any existing Package references. If you specify an empty collection, then all Package references are removed from the Pool. If omitted, any existing Package references are left unchanged. /// A list of name-value pairs associated with the Pool as metadata. If this element is present, it replaces any existing metadata configured on the Pool. If you specify an empty collection, any metadata is removed from the Pool. If omitted, any existing metadata is left unchanged. + /// The virtual machine configuration for the Pool. This property must be specified.<br /><br />This field can be updated only when the pool is empty. /// The desired node communication mode for the pool. If this element is present, it replaces the existing targetNodeCommunicationMode configured on the Pool. If omitted, any existing metadata is left unchanged. + /// The number of task slots that can be used to run concurrent tasks on a single compute node in the pool. The default value is 1. The maximum value is the smaller of 4 times the number of cores of the vmSize of the pool or 256.<br /><br />This field can be updated only when the pool is empty. + /// How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread.<br /><br />This field can be updated only when the pool is empty. + /// The network configuration for the Pool. This field can be updated only when the pool is empty. + /// The user-specified tags associated with the pool. The user-defined tags to be associated with the Azure Batch Pool. When specified, these tags are propagated to the backing Azure resources associated with the pool. This property can only be specified when the Batch account was created with the poolAllocationMode property set to 'UserSubscription'.<br /><br />This field can be updated only when the pool is empty. + /// The list of user Accounts to be created on each Compute Node in the Pool. This field can be updated only when the pool is empty. + /// Mount storage using specified file system for the entire lifetime of the pool. 
Mount the storage using Azure fileshare, NFS, CIFS or Blobfuse based file system.<br /><br />This field can be updated only when the pool is empty. + /// The upgrade policy for the Pool. Describes an upgrade policy - automatic, manual, or rolling.<br /><br />This field can be updated only when the pool is empty. /// Keeps track of any properties unknown to the library. - internal BatchPoolUpdateContent(BatchStartTask startTask, IList applicationPackageReferences, IList metadata, BatchNodeCommunicationMode? targetNodeCommunicationMode, IDictionary serializedAdditionalRawData) + internal BatchPoolUpdateContent(string displayName, string vmSize, bool? enableInterNodeCommunication, BatchStartTask startTask, IList applicationPackageReferences, IList metadata, VirtualMachineConfiguration virtualMachineConfiguration, BatchNodeCommunicationMode? targetNodeCommunicationMode, int? taskSlotsPerNode, BatchTaskSchedulingPolicy taskSchedulingPolicy, NetworkConfiguration networkConfiguration, IDictionary resourceTags, IList userAccounts, IList mountConfiguration, UpgradePolicy upgradePolicy, IDictionary serializedAdditionalRawData) { + DisplayName = displayName; + VmSize = vmSize; + EnableInterNodeCommunication = enableInterNodeCommunication; StartTask = startTask; ApplicationPackageReferences = applicationPackageReferences; Metadata = metadata; + VirtualMachineConfiguration = virtualMachineConfiguration; TargetNodeCommunicationMode = targetNodeCommunicationMode; + TaskSlotsPerNode = taskSlotsPerNode; + TaskSchedulingPolicy = taskSchedulingPolicy; + NetworkConfiguration = networkConfiguration; + ResourceTags = resourceTags; + UserAccounts = userAccounts; + MountConfiguration = mountConfiguration; + UpgradePolicy = upgradePolicy; _serializedAdditionalRawData = serializedAdditionalRawData; } + /// The display name for the Pool. The display name need not be unique and can contain any Unicode characters up to a maximum length of 1024. This field can be updated only when the pool is empty. + public string DisplayName { get; set; } + /// The size of virtual machines in the Pool. For information about available sizes of virtual machines in Pools, see Choose a VM size for Compute Nodes in an Azure Batch Pool (https://docs.microsoft.com/azure/batch/batch-pool-vm-sizes).<br /><br />This field can be updated only when the pool is empty. + public string VmSize { get; set; } + /// Whether the Pool permits direct communication between Compute Nodes. Enabling inter-node communication limits the maximum size of the Pool due to deployment restrictions on the Compute Nodes of the Pool. This may result in the Pool not reaching its desired size. The default value is false.<br /><br />This field can be updated only when the pool is empty. + public bool? EnableInterNodeCommunication { get; set; } /// A Task to run on each Compute Node as it joins the Pool. The Task runs when the Compute Node is added to the Pool or when the Compute Node is restarted. If this element is present, it overwrites any existing StartTask. If omitted, any existing StartTask is left unchanged. public BatchStartTask StartTask { get; set; } /// A list of Packages to be installed on each Compute Node in the Pool. Changes to Package references affect all new Nodes joining the Pool, but do not affect Compute Nodes that are already in the Pool until they are rebooted or reimaged. If this element is present, it replaces any existing Package references. If you specify an empty collection, then all Package references are removed from the Pool. 
If omitted, any existing Package references are left unchanged. public IList ApplicationPackageReferences { get; } /// A list of name-value pairs associated with the Pool as metadata. If this element is present, it replaces any existing metadata configured on the Pool. If you specify an empty collection, any metadata is removed from the Pool. If omitted, any existing metadata is left unchanged. public IList Metadata { get; } + /// The virtual machine configuration for the Pool. This property must be specified.<br /><br />This field can be updated only when the pool is empty. + public VirtualMachineConfiguration VirtualMachineConfiguration { get; set; } /// The desired node communication mode for the pool. If this element is present, it replaces the existing targetNodeCommunicationMode configured on the Pool. If omitted, any existing metadata is left unchanged. public BatchNodeCommunicationMode? TargetNodeCommunicationMode { get; set; } + /// The number of task slots that can be used to run concurrent tasks on a single compute node in the pool. The default value is 1. The maximum value is the smaller of 4 times the number of cores of the vmSize of the pool or 256.<br /><br />This field can be updated only when the pool is empty. + public int? TaskSlotsPerNode { get; set; } + /// How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread.<br /><br />This field can be updated only when the pool is empty. + public BatchTaskSchedulingPolicy TaskSchedulingPolicy { get; set; } + /// The network configuration for the Pool. This field can be updated only when the pool is empty. + public NetworkConfiguration NetworkConfiguration { get; set; } + /// The user-specified tags associated with the pool. The user-defined tags to be associated with the Azure Batch Pool. When specified, these tags are propagated to the backing Azure resources associated with the pool. This property can only be specified when the Batch account was created with the poolAllocationMode property set to 'UserSubscription'.<br /><br />This field can be updated only when the pool is empty. + public IDictionary ResourceTags { get; } + /// The list of user Accounts to be created on each Compute Node in the Pool. This field can be updated only when the pool is empty. + public IList UserAccounts { get; } + /// Mount storage using specified file system for the entire lifetime of the pool. Mount the storage using Azure fileshare, NFS, CIFS or Blobfuse based file system.<br /><br />This field can be updated only when the pool is empty. + public IList MountConfiguration { get; } + /// The upgrade policy for the Pool. Describes an upgrade policy - automatic, manual, or rolling.<br /><br />This field can be updated only when the pool is empty. 
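Taken together, the properties above make most pool settings patchable through BatchPoolUpdateContent instead of being fixed at pool creation, generally only while the pool has no Compute Nodes. The following is a minimal, hedged usage sketch and not part of this diff: the `client` variable, the UpdatePoolAsync(poolId, content) call shape, and the "mypool" pool ID are assumptions for illustration.

using System.Threading.Tasks;
using Azure.Compute.Batch;

static class PoolUpdateExample
{
    // Hedged sketch: exercises the newly updatable BatchPoolUpdateContent fields.
    // `client`, the UpdatePoolAsync overload, and the pool ID are assumed for illustration.
    public static async Task PatchEmptyPoolAsync(BatchClient client)
    {
        var update = new BatchPoolUpdateContent
        {
            DisplayName = "renamed-pool",                 // updatable only while the pool is empty
            VmSize = "standard_d2s_v3",                   // updatable only while the pool is empty
            TaskSlotsPerNode = 2,
            EnableInterNodeCommunication = false,
            // String-based constructor matches the extensible enum used in the deserializer above.
            TargetNodeCommunicationMode = new BatchNodeCommunicationMode("simplified"),
        };
        update.ResourceTags["costCenter"] = "1234";       // get-only dictionary, populated in place

        await client.UpdatePoolAsync("mypool", update);
    }
}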
+ public UpgradePolicy UpgradePolicy { get; set; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUsageMetrics.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUsageMetrics.Serialization.cs index baa7f92a088de..a04cf7e7b8e38 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUsageMetrics.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUsageMetrics.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolUsageMetrics : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(BatchPoolUsageMetrics)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("poolId"u8); writer.WriteStringValue(PoolId); writer.WritePropertyName("startTime"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } BatchPoolUsageMetrics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUsageStatistics.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUsageStatistics.Serialization.cs index 3643fcc75cebf..4a8ee9996b9b4 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUsageStatistics.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchPoolUsageStatistics.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchPoolUsageStatistics : IUtf8JsonSerializable, IJsonMode void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead throw new FormatException($"The model {nameof(BatchPoolUsageStatistics)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("startTime"u8); writer.WriteStringValue(StartTime, "O"); writer.WritePropertyName("lastUpdateTime"u8); @@ -47,7 +55,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead #endif } } - writer.WriteEndObject(); } BatchPoolUsageStatistics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTask.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTask.Serialization.cs index d2d681b9cd1df..4782724060ba7 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTask.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTask.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchStartTask : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp throw new FormatException($"The model {nameof(BatchStartTask)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("commandLine"u8); writer.WriteStringValue(CommandLine); if (Optional.IsDefined(ContainerSettings)) @@ -83,7 +91,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp #endif } } - writer.WriteEndObject(); } BatchStartTask IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTask.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTask.cs index d83549ff06989..c0a56159c7a4a 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTask.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTask.cs @@ -60,7 +60,7 @@ public partial class BatchStartTask private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The command line of the StartTask. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the StartTask. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. 
If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// is null. public BatchStartTask(string commandLine) { @@ -72,7 +72,7 @@ public BatchStartTask(string commandLine) } /// Initializes a new instance of . - /// The command line of the StartTask. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the StartTask. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// The settings for the container under which the StartTask runs. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. /// A list of files that the Batch service will download to the Compute Node before running the command line. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. Files listed under this element are located in the Task's working directory. /// A list of environment variable settings for the StartTask. @@ -97,7 +97,7 @@ internal BatchStartTask() { } - /// The command line of the StartTask. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. 
If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the StartTask. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). public string CommandLine { get; set; } /// The settings for the container under which the StartTask runs. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. public BatchTaskContainerSettings ContainerSettings { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTaskInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTaskInfo.Serialization.cs index 6f0dd8e100dfc..bc4967a342582 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTaskInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchStartTaskInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchStartTaskInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(BatchStartTaskInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("state"u8); writer.WriteStringValue(State.ToString()); writer.WritePropertyName("startTime"u8); @@ -77,7 +85,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit #endif } } - writer.WriteEndObject(); } BatchStartTaskInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchSubtask.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchSubtask.Serialization.cs index a5fc546e0d6b1..1dd289e6306b7 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchSubtask.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchSubtask.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchSubtask : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti throw new FormatException($"The model {nameof(BatchSubtask)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -101,7 +109,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti #endif } } - writer.WriteEndObject(); } BatchSubtask IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchSupportedImage.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchSupportedImage.Serialization.cs index 18ce9f3f0a497..f840d99b62de7 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchSupportedImage.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchSupportedImage.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchSupportedImage : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(BatchSupportedImage)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("nodeAgentSKUId"u8); writer.WriteStringValue(NodeAgentSkuId); writer.WritePropertyName("imageReference"u8); @@ -64,7 +72,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri #endif } } - writer.WriteEndObject(); } BatchSupportedImage IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTask.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTask.Serialization.cs index 1c82899a42e69..063f039038ad3 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTask.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTask.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTask : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions throw new FormatException($"The model {nameof(BatchTask)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (options.Format != "W" && Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -196,7 +204,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions #endif } } - writer.WriteEndObject(); } BatchTask IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTask.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTask.cs index 3f38ef61f36d9..a55937218a2ca 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTask.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTask.cs @@ -76,7 +76,7 @@ public BatchTask() /// The time at which the Task entered its current state. /// The previous state of the Task. This property is not set if the Task is in its initial Active state. /// The time at which the Task entered its previous state. This property is not set if the Task is in its initial Active state. - /// The command line of the Task. For multi-instance Tasks, the command line is executed as the primary Task, after the primary Task and all subtasks have finished executing the coordination command line. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. 
If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Task. For multi-instance Tasks, the command line is executed as the primary Task, after the primary Task and all subtasks have finished executing the coordination command line. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// The settings for the container under which the Task runs. If the Pool that will run this Task has containerConfiguration set, this must be set as well. If the Pool that will run this Task doesn't have containerConfiguration set, this must not be set. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. /// A list of files that the Batch service will download to the Compute Node before running the command line. For multi-instance Tasks, the resource files will only be downloaded to the Compute Node on which the primary Task is executed. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. /// A list of files that the Batch service will upload from the Compute Node after running the command line. For multi-instance Tasks, the files will only be uploaded from the Compute Node on which the primary Task is executed. @@ -147,7 +147,7 @@ internal BatchTask(string id, string displayName, string url, string eTag, DateT public BatchTaskState? PreviousState { get; } /// The time at which the Task entered its previous state. This property is not set if the Task is in its initial Active state. public DateTimeOffset? PreviousStateTransitionTime { get; } - /// The command line of the Task. For multi-instance Tasks, the command line is executed as the primary Task, after the primary Task and all subtasks have finished executing the coordination command line. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. 
If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Task. For multi-instance Tasks, the command line is executed as the primary Task, after the primary Task and all subtasks have finished executing the coordination command line. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). public string CommandLine { get; } /// The settings for the container under which the Task runs. If the Pool that will run this Task has containerConfiguration set, this must be set as well. If the Pool that will run this Task doesn't have containerConfiguration set, this must not be set. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. public BatchTaskContainerSettings ContainerSettings { get; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskAddCollectionResult.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskAddCollectionResult.Serialization.cs index 68facdb5d556f..cc0d96ca5629f 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskAddCollectionResult.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskAddCollectionResult.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskAddCollectionResult : IUtf8JsonSerializable, IJson void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model throw new FormatException($"The model {nameof(BatchTaskAddCollectionResult)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsCollectionDefined(Value)) { writer.WritePropertyName("value"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model #endif } } - writer.WriteEndObject(); } BatchTaskAddCollectionResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskAddResult.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskAddResult.Serialization.cs index b39d20c111bc2..93a5e4a4ef272 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskAddResult.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskAddResult.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskAddResult : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(BatchTaskAddResult)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("status"u8); writer.WriteStringValue(Status.ToString()); writer.WritePropertyName("taskId"u8); @@ -65,7 +73,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit #endif } } - writer.WriteEndObject(); } BatchTaskAddResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskConstraints.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskConstraints.Serialization.cs index 21a6503e72288..87bf8a7ae9de4 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskConstraints.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskConstraints.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskConstraints : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr throw new FormatException($"The model {nameof(BatchTaskConstraints)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(MaxWallClockTime)) { writer.WritePropertyName("maxWallClockTime"u8); @@ -56,7 +64,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr #endif } } - writer.WriteEndObject(); } BatchTaskConstraints IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerExecutionInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerExecutionInfo.Serialization.cs index e3a1d872a7718..97d508809aa9d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerExecutionInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerExecutionInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskContainerExecutionInfo : IUtf8JsonSerializable, IJ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mo throw new FormatException($"The model {nameof(BatchTaskContainerExecutionInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(ContainerId)) { writer.WritePropertyName("containerId"u8); @@ -56,7 +64,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mo #endif } } - writer.WriteEndObject(); } BatchTaskContainerExecutionInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerSettings.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerSettings.Serialization.cs index df8670ff83d07..b7d45b7c030ae 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerSettings.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerSettings.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskContainerSettings : IUtf8JsonSerializable, IJsonMo void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe throw new FormatException($"The model {nameof(BatchTaskContainerSettings)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(ContainerRunOptions)) { writer.WritePropertyName("containerRunOptions"u8); @@ -43,6 +51,16 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe writer.WritePropertyName("workingDirectory"u8); writer.WriteStringValue(WorkingDirectory.Value.ToString()); } + if (Optional.IsCollectionDefined(ContainerHostBatchBindMounts)) + { + writer.WritePropertyName("containerHostBatchBindMounts"u8); + writer.WriteStartArray(); + foreach (var item in ContainerHostBatchBindMounts) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -58,7 +76,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe #endif } } - writer.WriteEndObject(); } BatchTaskContainerSettings IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) @@ -85,6 +102,7 @@ internal static BatchTaskContainerSettings DeserializeBatchTaskContainerSettings string imageName = default; ContainerRegistryReference registry = default; ContainerWorkingDirectory? workingDirectory = default; + IList containerHostBatchBindMounts = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -117,13 +135,33 @@ internal static BatchTaskContainerSettings DeserializeBatchTaskContainerSettings workingDirectory = new ContainerWorkingDirectory(property.Value.GetString()); continue; } + if (property.NameEquals("containerHostBatchBindMounts"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(ContainerHostBatchBindMountEntry.DeserializeContainerHostBatchBindMountEntry(item, options)); + } + containerHostBatchBindMounts = array; + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } } serializedAdditionalRawData = rawDataDictionary; - return new BatchTaskContainerSettings(containerRunOptions, imageName, registry, workingDirectory, serializedAdditionalRawData); + return new BatchTaskContainerSettings( + containerRunOptions, + imageName, + registry, + workingDirectory, + containerHostBatchBindMounts ?? new ChangeTrackingList(), + serializedAdditionalRawData); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerSettings.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerSettings.cs index 9ecfdf09e22b8..f528edcc87530 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerSettings.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskContainerSettings.cs @@ -53,6 +53,7 @@ public BatchTaskContainerSettings(string imageName) Argument.AssertNotNull(imageName, nameof(imageName)); ImageName = imageName; + ContainerHostBatchBindMounts = new ChangeTrackingList(); } /// Initializes a new instance of . 
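The BatchTaskContainerSettings hunks above add an optional containerHostBatchBindMounts collection that controls which host data paths are mounted into a container task. Below is a hedged sketch of how it might be populated; the ContainerHostBatchBindMountEntry members (Source, IsReadOnly) and the ContainerHostDataPath value are assumptions inferred from the type names, since this diff only shows the collection itself.

using Azure.Compute.Batch;

static class BindMountExample
{
    // Hedged sketch: opt a container task into explicit host bind mounts.
    // Source/IsReadOnly and ContainerHostDataPath.Task are assumed member names, not shown in this diff.
    public static BatchTaskContainerSettings CreateSettings()
    {
        var settings = new BatchTaskContainerSettings("myregistry.azurecr.io/worker:latest");
        settings.ContainerHostBatchBindMounts.Add(new ContainerHostBatchBindMountEntry
        {
            Source = ContainerHostDataPath.Task, // mount only the task directory
            IsReadOnly = false,
        });
        // An empty collection mounts no host data paths; leaving it unset keeps the default of
        // mounting the whole temporary drive (Windows) or AZ_BATCH_NODE_ROOT_DIR (Linux).
        return settings;
    }
}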
@@ -60,13 +61,15 @@ public BatchTaskContainerSettings(string imageName) /// The Image to use to create the container in which the Task will run. This is the full Image reference, as would be specified to "docker pull". If no tag is provided as part of the Image name, the tag ":latest" is used as a default. /// The private registry which contains the container Image. This setting can be omitted if it was already provided at Pool creation. /// The location of the container Task working directory. The default is 'taskWorkingDirectory'. + /// The paths to mount into the container task. If this array is null or not present, the container task mounts the entire temporary disk drive on Windows (or AZ_BATCH_NODE_ROOT_DIR on Linux). If this array is set to empty, no data paths are mounted into the container. /// Keeps track of any properties unknown to the library. - internal BatchTaskContainerSettings(string containerRunOptions, string imageName, ContainerRegistryReference registry, ContainerWorkingDirectory? workingDirectory, IDictionary serializedAdditionalRawData) + internal BatchTaskContainerSettings(string containerRunOptions, string imageName, ContainerRegistryReference registry, ContainerWorkingDirectory? workingDirectory, IList containerHostBatchBindMounts, IDictionary serializedAdditionalRawData) { ContainerRunOptions = containerRunOptions; ImageName = imageName; Registry = registry; WorkingDirectory = workingDirectory; + ContainerHostBatchBindMounts = containerHostBatchBindMounts; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -83,5 +86,7 @@ internal BatchTaskContainerSettings() public ContainerRegistryReference Registry { get; set; } /// The location of the container Task working directory. The default is 'taskWorkingDirectory'. public ContainerWorkingDirectory? WorkingDirectory { get; set; } + /// The paths to mount into the container task. If this array is null or not present, the container task mounts the entire temporary disk drive on Windows (or AZ_BATCH_NODE_ROOT_DIR on Linux). If this array is set to empty, no data paths are mounted into the container. + public IList ContainerHostBatchBindMounts { get; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCounts.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCounts.Serialization.cs index ac91cee387b9b..e669347a471db 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCounts.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCounts.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskCounts : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ?
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO throw new FormatException($"The model {nameof(BatchTaskCounts)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("active"u8); writer.WriteNumberValue(Active); writer.WritePropertyName("running"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO #endif } } - writer.WriteEndObject(); } BatchTaskCounts IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCountsResult.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCountsResult.Serialization.cs index 391729746f972..095234940cf42 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCountsResult.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCountsResult.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskCountsResult : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(BatchTaskCountsResult)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("taskCounts"u8); writer.WriteObjectValue(TaskCounts, options); writer.WritePropertyName("taskSlotCounts"u8); @@ -45,7 +53,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } BatchTaskCountsResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCreateContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCreateContent.Serialization.cs index 6481923dc5851..4ad422ca79a95 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCreateContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskCreateContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskCreateContent : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchTaskCreateContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("id"u8); writer.WriteStringValue(Id); if (Optional.IsDefined(DisplayName)) @@ -135,7 +143,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchTaskCreateContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskDependencies.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskDependencies.Serialization.cs index f60524791f391..d73537db4ae38 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskDependencies.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskDependencies.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskDependencies : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(BatchTaskDependencies)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsCollectionDefined(TaskIds)) { writer.WritePropertyName("taskIds"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } BatchTaskDependencies IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskExecutionInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskExecutionInfo.Serialization.cs index 37d904a938704..38d01a513627e 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskExecutionInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskExecutionInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskExecutionInfo : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(BatchTaskExecutionInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(StartTime)) { writer.WritePropertyName("startTime"u8); @@ -85,7 +93,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } BatchTaskExecutionInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskFailureInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskFailureInfo.Serialization.cs index 2d5f3ce06d382..58e6a3e4ecf03 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskFailureInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskFailureInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskFailureInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr throw new FormatException($"The model {nameof(BatchTaskFailureInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("category"u8); writer.WriteStringValue(Category.ToString()); if (Optional.IsDefined(Code)) @@ -63,7 +71,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr #endif } } - writer.WriteEndObject(); } BatchTaskFailureInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskGroup.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskGroup.Serialization.cs index e6a413291e88b..9236a9af3a42a 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskGroup.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskGroup.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskGroup : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp throw new FormatException($"The model {nameof(BatchTaskGroup)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("value"u8); writer.WriteStartArray(); foreach (var item in Value) @@ -48,7 +56,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp #endif } } - writer.WriteEndObject(); } BatchTaskGroup IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskIdRange.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskIdRange.Serialization.cs index 939c14d5d8396..d7e27a4c4621c 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskIdRange.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskIdRange.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskIdRange : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter throw new FormatException($"The model {nameof(BatchTaskIdRange)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("start"u8); writer.WriteNumberValue(Start); writer.WritePropertyName("end"u8); @@ -45,7 +53,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter #endif } } - writer.WriteEndObject(); } BatchTaskIdRange IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskInfo.Serialization.cs index ecc2b0cc77622..fce0c00d4e168 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt throw new FormatException($"The model {nameof(BatchTaskInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(TaskUrl)) { writer.WritePropertyName("taskUrl"u8); @@ -68,7 +76,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt #endif } } - writer.WriteEndObject(); } BatchTaskInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskSchedulingPolicy.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskSchedulingPolicy.Serialization.cs index 418c684f12401..efa4fc550503d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskSchedulingPolicy.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskSchedulingPolicy.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskSchedulingPolicy : IUtf8JsonSerializable, IJsonMod void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRea throw new FormatException($"The model {nameof(BatchTaskSchedulingPolicy)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("nodeFillType"u8); writer.WriteStringValue(NodeFillType.ToString()); if (options.Format != "W" && _serializedAdditionalRawData != null) @@ -43,7 +51,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRea #endif } } - writer.WriteEndObject(); } BatchTaskSchedulingPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskSlotCounts.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskSlotCounts.Serialization.cs index 0ff4c5b62c720..3d92f7586cfc0 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskSlotCounts.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskSlotCounts.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskSlotCounts : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(BatchTaskSlotCounts)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("active"u8); writer.WriteNumberValue(Active); writer.WritePropertyName("running"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri #endif } } - writer.WriteEndObject(); } BatchTaskSlotCounts IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskStatistics.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskStatistics.Serialization.cs index 550e7bb1eefef..d4f28aada2d92 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskStatistics.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/BatchTaskStatistics.Serialization.cs @@ -18,6 +18,15 @@ public partial class BatchTaskStatistics : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(BatchTaskStatistics)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("url"u8); writer.WriteStringValue(Url); writer.WritePropertyName("startTime"u8); @@ -63,7 +71,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWri #endif } } - writer.WriteEndObject(); } BatchTaskStatistics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/CifsMountConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/CifsMountConfiguration.Serialization.cs index e7c24edf8cc06..34f48187f6e76 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/CifsMountConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/CifsMountConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class CifsMountConfiguration : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(CifsMountConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("username"u8); writer.WriteStringValue(Username); writer.WritePropertyName("source"u8); @@ -54,7 +62,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } CifsMountConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ComputeBatchModelFactory.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ComputeBatchModelFactory.cs index 51e40bc9f43a2..dc143459297b4 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ComputeBatchModelFactory.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ComputeBatchModelFactory.cs @@ -139,8 +139,10 @@ public static BatchPoolCreateContent BatchPoolCreateContent(string id = null, st /// The version of the Azure Virtual Machines Marketplace Image. A value of 'latest' can be specified to select the latest version of an Image. If omitted, the default is 'latest'. /// The ARM resource identifier of the Azure Compute Gallery Image. Compute Nodes in the Pool will be created using this Image Id. This is of the form /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Compute/galleries/{galleryName}/images/{imageDefinitionName}/versions/{VersionId} or /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Compute/galleries/{galleryName}/images/{imageDefinitionName} for always defaulting to the latest image version. This property is mutually exclusive with other ImageReference properties. The Azure Compute Gallery Image must have replicas in the same region and must be in the same subscription as the Azure Batch account. If the image version is not specified in the imageId, the latest version will be used. For information about the firewall settings for the Batch Compute Node agent to communicate with the Batch service see https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. /// The specific version of the platform image or marketplace image used to create the node. This read-only field differs from 'version' only if the value specified for 'version' when the pool was created was 'latest'. + /// The shared gallery image unique identifier. This property is mutually exclusive with other properties and can be fetched from shared gallery image GET call. + /// The community gallery image unique identifier. This property is mutually exclusive with other properties and can be fetched from community gallery image GET call. /// A new instance for mocking. 
- public static ImageReference ImageReference(string publisher = null, string offer = null, string sku = null, string version = null, string virtualMachineImageId = null, string exactVersion = null) + public static ImageReference ImageReference(string publisher = null, string offer = null, string sku = null, string version = null, string virtualMachineImageId = null, string exactVersion = null, string sharedGalleryImageId = null, string communityGalleryImageId = null) { return new ImageReference( publisher, @@ -149,6 +151,8 @@ public static ImageReference ImageReference(string publisher = null, string offe version, virtualMachineImageId, exactVersion, + sharedGalleryImageId, + communityGalleryImageId, serializedAdditionalRawData: null); } @@ -163,7 +167,7 @@ public static ImageReference ImageReference(string publisher = null, string offe /// The time at which the Pool entered its current state. /// Whether the Pool is resizing. /// The time at which the Pool entered its current allocation state. - /// The size of virtual machines in the Pool. All virtual machines in a Pool are the same size. For information about available sizes of virtual machines in Pools, see Choose a VM size for Compute Nodes in an Azure Batch Pool (https://docs.microsoft.com/azure/batch/batch-pool-vm-sizes). + /// The size of virtual machines in the Pool. All virtual machines in a Pool are the same size. For information about available VM sizes, see Sizes for Virtual Machines (Linux) (https://azure.microsoft.com/documentation/articles/virtual-machines-linux-sizes/) or Sizes for Virtual Machines (Windows) (https://azure.microsoft.com/documentation/articles/virtual-machines-windows-sizes/). Batch supports all Azure VM sizes except STANDARD_A0 and those with premium storage (STANDARD_GS, STANDARD_DS, and STANDARD_DSV2 series). /// The virtual machine configuration for the Pool. This property must be specified. /// The timeout for allocation of Compute Nodes to the Pool. This is the timeout for the most recent resize operation. (The initial sizing when the Pool is created counts as a resize.) The default value is 15 minutes. /// A list of errors encountered while performing the last resize on the Pool. This property is set only if one or more errors occurred during the last Pool resize, and only when the Pool allocationState is Steady. @@ -184,7 +188,7 @@ public static ImageReference ImageReference(string publisher = null, string offe /// How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread. /// The list of user Accounts to be created on each Compute Node in the Pool. /// A list of name-value pairs associated with the Pool as metadata. - /// Utilization and resource usage statistics for the entire lifetime of the Pool. This property is populated only if the CloudPool was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. + /// Utilization and resource usage statistics for the entire lifetime of the Pool. This property is populated only if the BatchPool was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. /// A list of file systems to mount on each node in the pool. 
This supports Azure Files, NFS, CIFS/SMB, and Blobfuse. /// The identity of the Batch pool, if configured. The list of user identities associated with the Batch pool. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. /// The desired node communication mode for the pool. If omitted, the default value is Default. @@ -413,10 +417,12 @@ public static BatchPoolNodeCounts BatchPoolNodeCounts(string poolId = null, Batc /// The number of Compute Nodes in the unknown state. /// The number of Compute Nodes in the unusable state. /// The number of Compute Nodes in the waitingForStartTask state. + /// The number of Compute Nodes in the deallocated state. + /// The number of Compute Nodes in the deallocating state. /// The total number of Compute Nodes. /// The number of Compute Nodes in the upgradingOS state. /// A new instance for mocking. - public static BatchNodeCounts BatchNodeCounts(int creating = default, int idle = default, int offline = default, int preempted = default, int rebooting = default, int reimaging = default, int running = default, int starting = default, int startTaskFailed = default, int leavingPool = default, int unknown = default, int unusable = default, int waitingForStartTask = default, int total = default, int upgradingOs = default) + public static BatchNodeCounts BatchNodeCounts(int creating = default, int idle = default, int offline = default, int preempted = default, int rebooting = default, int reimaging = default, int running = default, int starting = default, int startTaskFailed = default, int leavingPool = default, int unknown = default, int unusable = default, int waitingForStartTask = default, int deallocated = default, int deallocating = default, int total = default, int upgradingOs = default) { return new BatchNodeCounts( creating, @@ -432,6 +438,8 @@ public static BatchNodeCounts BatchNodeCounts(int creating = default, int idle = unknown, unusable, waitingForStartTask, + deallocated, + deallocating, total, upgradingOs, serializedAdditionalRawData: null); @@ -463,7 +471,7 @@ public static BatchNodeCounts BatchNodeCounts(int creating = default, int idle = /// The network configuration for the Job. /// A list of name-value pairs associated with the Job as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. /// The execution information for the Job. - /// Resource usage statistics for the entire lifetime of the Job. This property is populated only if the CloudJob was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. + /// Resource usage statistics for the entire lifetime of the Job. This property is populated only if the BatchJob was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. /// A new instance for mocking. public static BatchJob BatchJob(string id = null, string displayName = null, bool? usesTaskDependencies = null, string url = null, string eTag = null, DateTimeOffset? lastModified = null, DateTimeOffset? creationTime = null, BatchJobState? state = null, DateTimeOffset? 
stateTransitionTime = null, BatchJobState? previousState = null, DateTimeOffset? previousStateTransitionTime = null, int? priority = null, bool? allowTaskPreemption = null, int? maxParallelTasks = null, BatchJobConstraints constraints = null, BatchJobManagerTask jobManagerTask = null, BatchJobPreparationTask jobPreparationTask = null, BatchJobReleaseTask jobReleaseTask = null, IEnumerable commonEnvironmentSettings = null, BatchPoolInfo poolInfo = null, OnAllBatchTasksComplete? onAllTasksComplete = null, OnBatchTaskFailure? onTaskFailure = null, BatchJobNetworkConfiguration networkConfiguration = null, IEnumerable metadata = null, BatchJobExecutionInfo executionInfo = null, BatchJobStatistics stats = null) { @@ -924,7 +932,7 @@ public static BatchTaskCreateContent BatchTaskCreateContent(string id = null, st /// The time at which the Task entered its current state. /// The previous state of the Task. This property is not set if the Task is in its initial Active state. /// The time at which the Task entered its previous state. This property is not set if the Task is in its initial Active state. - /// The command line of the Task. For multi-instance Tasks, the command line is executed as the primary Task, after the primary Task and all subtasks have finished executing the coordination command line. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables). + /// The command line of the Task. For multi-instance Tasks, the command line is executed as the primary Task, after the primary Task and all subtasks have finished executing the coordination command line. The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment variable expansion. If you want to take advantage of such features, you should invoke the shell in the command line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command line refers to file paths, it should use a relative path (relative to the Task working directory), or use the Batch provided environment variable (https://docs.microsoft.com/azure/batch/batch-compute-node-environment-variables). /// The settings for the container under which the Task runs. If the Pool that will run this Task has containerConfiguration set, this must be set as well. If the Pool that will run this Task doesn't have containerConfiguration set, this must not be set. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) are mapped into the container, all Task environment variables are mapped into the container, and the Task command line is executed in the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. /// A list of files that the Batch service will download to the Compute Node before running the command line. 
For multi-instance Tasks, the resource files will only be downloaded to the Compute Node on which the primary Task is executed. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. /// A list of files that the Batch service will upload from the Compute Node after running the command line. For multi-instance Tasks, the files will only be uploaded from the Compute Node on which the primary Task is executed. @@ -1137,7 +1145,7 @@ public static BatchNodeFile BatchNodeFile(string name = null, string url = null, /// The content type of the file. /// The file mode attribute in octal format. The file mode is returned only for files on Linux Compute Nodes. /// A new instance for mocking. - public static FileProperties FileProperties(DateTimeOffset? creationTime = null, DateTimeOffset lastModified = default, long contentLength = default, string contentType = null, string fileMode = null) + public static FileProperties FileProperties(DateTimeOffset? creationTime = null, DateTimeOffset lastModified = default, string contentLength = null, string contentType = null, string fileMode = null) { return new FileProperties( creationTime, @@ -1152,7 +1160,7 @@ public static FileProperties FileProperties(DateTimeOffset? creationTime = null, /// The user name of the Account. /// Whether the Account should be an administrator on the Compute Node. The default value is false. /// The time at which the Account should expire. If omitted, the default is 1 day from the current time. For Linux Compute Nodes, the expiryTime has a precision up to a day. - /// The password of the Account. The password is required for Windows Compute Nodes (those created with 'virtualMachineConfiguration' using a Windows Image reference). For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. + /// The password of the Account. The password is required for Windows Compute Nodes. For Linux Compute Nodes, the password can optionally be specified along with the sshPublicKey property. /// The SSH public key that can be used for remote login to the Compute Node. The public key should be compatible with OpenSSH encoding and should be base 64 encoded. This property can be specified only for Linux Compute Nodes. If this is specified for a Windows Compute Node, then the Batch service rejects the request; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). /// A new instance for mocking. public static BatchNodeUserCreateContent BatchNodeUserCreateContent(string name = null, bool? isAdmin = null, DateTimeOffset? 
expiryTime = null, string password = null, string sshPublicKey = null) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerConfiguration.Serialization.cs index d810361f3d8c2..a292ecd986876 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class ContainerConfiguration : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(ContainerConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("type"u8); writer.WriteStringValue(Type.ToString()); if (Optional.IsCollectionDefined(ContainerImageNames)) @@ -63,7 +71,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } ContainerConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostBatchBindMountEntry.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostBatchBindMountEntry.Serialization.cs new file mode 100644 index 0000000000000..7136f9b4e1ff3 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostBatchBindMountEntry.Serialization.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Compute.Batch +{ + public partial class ContainerHostBatchBindMountEntry : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContainerHostBatchBindMountEntry)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source.Value.ToString()); + } + if (Optional.IsDefined(IsReadOnly)) + { + writer.WritePropertyName("isReadOnly"u8); + writer.WriteBooleanValue(IsReadOnly.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ContainerHostBatchBindMountEntry IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContainerHostBatchBindMountEntry)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContainerHostBatchBindMountEntry(document.RootElement, options); + } + + internal static ContainerHostBatchBindMountEntry DeserializeContainerHostBatchBindMountEntry(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContainerHostDataPath? source = default; + bool? isReadOnly = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("source"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + source = new ContainerHostDataPath(property.Value.GetString()); + continue; + } + if (property.NameEquals("isReadOnly"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + isReadOnly = property.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ContainerHostBatchBindMountEntry(source, isReadOnly, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ContainerHostBatchBindMountEntry)} does not support writing '{options.Format}' format."); + } + } + + ContainerHostBatchBindMountEntry IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeContainerHostBatchBindMountEntry(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContainerHostBatchBindMountEntry)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ContainerHostBatchBindMountEntry FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeContainerHostBatchBindMountEntry(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostBatchBindMountEntry.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostBatchBindMountEntry.cs new file mode 100644 index 0000000000000..129e36e79f3d9 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostBatchBindMountEntry.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Compute.Batch +{ + /// The entry of a path and its mount mode to be mounted into the task container. + public partial class ContainerHostBatchBindMountEntry + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public ContainerHostBatchBindMountEntry() + { + } + + /// Initializes a new instance of . + /// The path to be mounted into the container, as selected by the customer. + /// Whether to mount this source path in read-only mode. The default value is false (read/write mode). For Linux, mounting the path in read/write mode does not mean that all users in the container have read/write access to it; that depends on the access configured on the host VM. If the path is mounted read-only, no user within the container will be able to modify it. + /// Keeps track of any properties unknown to the library. + internal ContainerHostBatchBindMountEntry(ContainerHostDataPath? source, bool? isReadOnly, IDictionary serializedAdditionalRawData) + { + Source = source; + IsReadOnly = isReadOnly; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The path to be mounted into the container, as selected by the customer. + public ContainerHostDataPath?
Source { get; set; } + /// Whether to mount this source path in read-only mode. The default value is false (read/write mode). For Linux, mounting the path in read/write mode does not mean that all users in the container have read/write access to it; that depends on the access configured on the host VM. If the path is mounted read-only, no user within the container will be able to modify it. + public bool? IsReadOnly { get; set; } + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostDataPath.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostDataPath.cs new file mode 100644 index 0000000000000..db0708edc7759 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerHostDataPath.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Compute.Batch +{ + /// The paths that can be mounted into the container task's container. + public readonly partial struct ContainerHostDataPath : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public ContainerHostDataPath(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string SharedValue = "Shared"; + private const string StartupValue = "Startup"; + private const string VfsMountsValue = "VfsMounts"; + private const string TaskValue = "Task"; + private const string JobPrepValue = "JobPrep"; + private const string ApplicationsValue = "Applications"; + + /// The path for multi-instance tasks to share their files. + public static ContainerHostDataPath Shared { get; } = new ContainerHostDataPath(SharedValue); + /// The path for the start task. + public static ContainerHostDataPath Startup { get; } = new ContainerHostDataPath(StartupValue); + /// The path that contains all virtual file systems mounted on this node. + public static ContainerHostDataPath VfsMounts { get; } = new ContainerHostDataPath(VfsMountsValue); + /// The task path. + public static ContainerHostDataPath Task { get; } = new ContainerHostDataPath(TaskValue); + /// The job-prep task path. + public static ContainerHostDataPath JobPrep { get; } = new ContainerHostDataPath(JobPrepValue); + /// The applications path. + public static ContainerHostDataPath Applications { get; } = new ContainerHostDataPath(ApplicationsValue); + /// Determines if two values are the same. + public static bool operator ==(ContainerHostDataPath left, ContainerHostDataPath right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(ContainerHostDataPath left, ContainerHostDataPath right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator ContainerHostDataPath(string value) => new ContainerHostDataPath(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ContainerHostDataPath other && Equals(other); + /// + public bool Equals(ContainerHostDataPath other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ?
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerRegistryReference.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerRegistryReference.Serialization.cs index ccca2db026d5c..8d38b0391d1fb 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerRegistryReference.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ContainerRegistryReference.Serialization.cs @@ -18,6 +18,15 @@ public partial class ContainerRegistryReference : IUtf8JsonSerializable, IJsonMo void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe throw new FormatException($"The model {nameof(ContainerRegistryReference)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Username)) { writer.WritePropertyName("username"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRe #endif } } - writer.WriteEndObject(); } ContainerRegistryReference IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/DataDisk.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/DataDisk.Serialization.cs index d6cbd406d7a78..78c2cf2bd1b9c 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/DataDisk.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/DataDisk.Serialization.cs @@ -18,6 +18,15 @@ public partial class DataDisk : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions throw new FormatException($"The model {nameof(DataDisk)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("lun"u8); writer.WriteNumberValue(LogicalUnitNumber); if (Optional.IsDefined(Caching)) @@ -55,7 +63,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions #endif } } - writer.WriteEndObject(); } DataDisk IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskPlacement.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskPlacement.cs index 4a31115ce5680..1a0e2a6dd4398 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskPlacement.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskPlacement.cs @@ -10,7 +10,7 @@ namespace Azure.Compute.Batch { - /// AccessDiffDiskPlacementScope enums. + /// Specifies the ephemeral disk placement for operating system disk for all compute nodes (VMs) in the pool. This property can be used by user in the request to choose which location the operating system should be in. e.g., cache disk space for Ephemeral OS disk provisioning. For more information on Ephemeral OS disk size requirements, please refer to Ephemeral OS disk size requirements for Windows VMs at https://docs.microsoft.com/azure/virtual-machines/windows/ephemeral-os-disks#size-requirements and Linux VMs at https://docs.microsoft.com/azure/virtual-machines/linux/ephemeral-os-disks#size-requirements. public readonly partial struct DiffDiskPlacement : IEquatable { private readonly string _value; diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskSettings.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskSettings.Serialization.cs index 193f5964b6bd4..132dfcdd78815 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskSettings.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskSettings.Serialization.cs @@ -18,6 +18,15 @@ public partial class DiffDiskSettings : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter throw new FormatException($"The model {nameof(DiffDiskSettings)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Placement)) { writer.WritePropertyName("placement"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter #endif } } - writer.WriteEndObject(); } DiffDiskSettings IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskSettings.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskSettings.cs index 03ec91b162edb..615bc8670e7ad 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskSettings.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/DiffDiskSettings.cs @@ -54,7 +54,7 @@ public DiffDiskSettings() } /// Initializes a new instance of . - /// Specifies the ephemeral disk placement for operating system disk for all VMs in the pool. This property can be used by user in the request to choose the location e.g., cache disk space for Ephemeral OS disk provisioning. For more information on Ephemeral OS disk size requirements, please refer to Ephemeral OS disk size requirements for Windows VMs at https://docs.microsoft.com/en-us/azure/virtual-machines/windows/ephemeral-os-disks#size-requirements and Linux VMs at https://docs.microsoft.com/en-us/azure/virtual-machines/linux/ephemeral-os-disks#size-requirements. + /// Specifies the ephemeral disk placement for operating system disk for all VMs in the pool. This property can be used by user in the request to choose the location e.g., cache disk space for Ephemeral OS disk provisioning. For more information on Ephemeral OS disk size requirements, please refer to Ephemeral OS disk size requirements for Windows VMs at https://docs.microsoft.com/azure/virtual-machines/windows/ephemeral-os-disks#size-requirements and Linux VMs at https://docs.microsoft.com/azure/virtual-machines/linux/ephemeral-os-disks#size-requirements. /// Keeps track of any properties unknown to the library. internal DiffDiskSettings(DiffDiskPlacement? placement, IDictionary serializedAdditionalRawData) { @@ -62,7 +62,7 @@ internal DiffDiskSettings(DiffDiskPlacement? placement, IDictionary Specifies the ephemeral disk placement for operating system disk for all VMs in the pool. This property can be used by user in the request to choose the location e.g., cache disk space for Ephemeral OS disk provisioning. For more information on Ephemeral OS disk size requirements, please refer to Ephemeral OS disk size requirements for Windows VMs at https://docs.microsoft.com/en-us/azure/virtual-machines/windows/ephemeral-os-disks#size-requirements and Linux VMs at https://docs.microsoft.com/en-us/azure/virtual-machines/linux/ephemeral-os-disks#size-requirements. + /// Specifies the ephemeral disk placement for operating system disk for all VMs in the pool. This property can be used by user in the request to choose the location e.g., cache disk space for Ephemeral OS disk provisioning. For more information on Ephemeral OS disk size requirements, please refer to Ephemeral OS disk size requirements for Windows VMs at https://docs.microsoft.com/azure/virtual-machines/windows/ephemeral-os-disks#size-requirements and Linux VMs at https://docs.microsoft.com/azure/virtual-machines/linux/ephemeral-os-disks#size-requirements. 
public DiffDiskPlacement? Placement { get; set; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/DiskEncryptionConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/DiskEncryptionConfiguration.Serialization.cs index 1719ed41f1880..dfe5e2e9a31ca 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/DiskEncryptionConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/DiskEncryptionConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class DiskEncryptionConfiguration : IUtf8JsonSerializable, IJsonM void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR throw new FormatException($"The model {nameof(DiskEncryptionConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsCollectionDefined(Targets)) { writer.WritePropertyName("targets"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR #endif } } - writer.WriteEndObject(); } DiskEncryptionConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/DiskEncryptionConfiguration.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/DiskEncryptionConfiguration.cs index 1a8f6a27f6ca8..2ad49f0b0d743 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/DiskEncryptionConfiguration.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/DiskEncryptionConfiguration.cs @@ -56,7 +56,7 @@ public DiskEncryptionConfiguration() } /// Initializes a new instance of . - /// The list of disk targets Batch Service will encrypt on the compute node. If omitted, no disks on the compute nodes in the pool will be encrypted. On Linux pool, only "TemporaryDisk" is supported; on Windows pool, "OsDisk" and "TemporaryDisk" must be specified. + /// The list of disk targets Batch Service will encrypt on the compute node. /// Keeps track of any properties unknown to the library. internal DiskEncryptionConfiguration(IList targets, IDictionary serializedAdditionalRawData) { @@ -64,7 +64,7 @@ internal DiskEncryptionConfiguration(IList targets, IDicti _serializedAdditionalRawData = serializedAdditionalRawData; } - /// The list of disk targets Batch Service will encrypt on the compute node. If omitted, no disks on the compute nodes in the pool will be encrypted. On Linux pool, only "TemporaryDisk" is supported; on Windows pool, "OsDisk" and "TemporaryDisk" must be specified. + /// The list of disk targets Batch Service will encrypt on the compute node.
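// Illustrative aside, a minimal standalone sketch of the serialization layering these hunks
// introduce: IJsonModel<T>.Write now only emits the surrounding object braces and delegates
// the member writes to the new protected virtual JsonModelWriteCore, so a model further down
// an inheritance chain can append members without opening a second JSON object. The types
// below are hypothetical stand-ins, not Azure.Compute.Batch models.
using System.IO;
using System.Text;
using System.Text.Json;

class SampleBase
{
    public string Name { get; set; } = "base";

    public void Write(Utf8JsonWriter writer)
    {
        writer.WriteStartObject();      // braces owned by Write, as in the generated code above
        JsonModelWriteCore(writer);     // members owned by the virtual core method
        writer.WriteEndObject();
    }

    protected virtual void JsonModelWriteCore(Utf8JsonWriter writer)
    {
        writer.WriteString("name", Name);
    }
}

class SampleDerived : SampleBase
{
    public int Extra { get; set; } = 1;

    protected override void JsonModelWriteCore(Utf8JsonWriter writer)
    {
        base.JsonModelWriteCore(writer);     // base members first
        writer.WriteNumber("extra", Extra);  // derived members land in the same flat object
    }
}

class JsonModelWriteCoreSketch
{
    static void Main()
    {
        using var stream = new MemoryStream();
        using (var writer = new Utf8JsonWriter(stream))
        {
            new SampleDerived().Write(writer);
        }
        // Prints {"name":"base","extra":1}, a single object with no nesting.
        System.Console.WriteLine(Encoding.UTF8.GetString(stream.ToArray()));
    }
}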
public IList Targets { get; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/Docs/BatchClient.xml b/sdk/batch/Azure.Compute.Batch/src/Generated/Docs/BatchClient.xml index 105b119931537..2cc4a0efd0242 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/Docs/BatchClient.xml +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/Docs/BatchClient.xml @@ -151,7 +151,10 @@ BatchPoolCreateContent pool = new BatchPoolCreateContent("mypool001", "standard_ }, Caching = CachingType.ReadWrite, DiskSizeGB = 100, - ManagedDisk = new ManagedDisk(StorageAccountType.StandardSSDLRS), + ManagedDisk = new ManagedDisk + { + StorageAccountType = StorageAccountType.StandardSSDLRS, + }, }, }, ResizeTimeout = XmlConvert.ToTimeSpan("PT15M"), @@ -427,7 +430,10 @@ BatchPoolCreateContent pool = new BatchPoolCreateContent("mypool001", "standard_ }, Caching = CachingType.ReadWrite, DiskSizeGB = 100, - ManagedDisk = new ManagedDisk(StorageAccountType.StandardSSDLRS), + ManagedDisk = new ManagedDisk + { + StorageAccountType = StorageAccountType.StandardSSDLRS, + }, }, }, ResizeTimeout = XmlConvert.ToTimeSpan("PT15M"), @@ -2109,7 +2115,7 @@ Response response = client.RemoveNodes("poolId", content); Console.WriteLine(response.Status); ]]> - + This sample shows how to call DeleteJobAsync. - + This sample shows how to call DeleteJob. - + This sample shows how to call TerminateJobAsync. - + This sample shows how to call TerminateJob. - + This sample shows how to call TerminateJobAsync. - + This sample shows how to call TerminateJob. - + This sample shows how to call DeleteJobScheduleAsync. - + This sample shows how to call DeleteJobSchedule. - + This sample shows how to call TerminateJobScheduleAsync. - + This sample shows how to call TerminateJobSchedule. "); TokenCredential credential = new DefaultAzureCredential(); BatchClient client = new BatchClient(endpoint, credential); +BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "bash -c 'echo hello'") +{ + ContainerSettings = new BatchTaskContainerSettings("ubuntu") + { + ContainerHostBatchBindMounts = {new ContainerHostBatchBindMountEntry + { + Source = ContainerHostDataPath.Task, + IsReadOnly = true, + }}, + }, + UserIdentity = new UserIdentity + { + AutoUser = new AutoUserSpecification + { + Scope = AutoUserScope.Task, + ElevationLevel = ElevationLevel.NonAdmin, + }, + }, +}; +Response response = await client.CreateTaskAsync("jobId", task); +]]> +This sample shows how to call CreateTaskAsync. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "bash -c 'echo hello'") +{ + ContainerSettings = new BatchTaskContainerSettings("ubuntu") + { + ContainerHostBatchBindMounts = {new ContainerHostBatchBindMountEntry + { + Source = ContainerHostDataPath.Task, + IsReadOnly = true, + }, new ContainerHostBatchBindMountEntry + { + Source = ContainerHostDataPath.Task, + IsReadOnly = true, + }}, + }, + UserIdentity = new UserIdentity + { + AutoUser = new AutoUserSpecification + { + Scope = AutoUserScope.Task, + ElevationLevel = ElevationLevel.NonAdmin, + }, + }, +}; +Response response = await client.CreateTaskAsync("jobId", task); +]]> +This sample shows how to call CreateTaskAsync. 
+"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "cmd /c exit 3") { ExitConditions = new ExitConditions @@ -4172,6 +4308,64 @@ Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); BatchClient client = new BatchClient(endpoint, credential); +BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "bash -c 'echo hello'") +{ + ContainerSettings = new BatchTaskContainerSettings("ubuntu") + { + ContainerHostBatchBindMounts = {new ContainerHostBatchBindMountEntry + { + Source = ContainerHostDataPath.Task, + IsReadOnly = true, + }}, + }, + UserIdentity = new UserIdentity + { + AutoUser = new AutoUserSpecification + { + Scope = AutoUserScope.Task, + ElevationLevel = ElevationLevel.NonAdmin, + }, + }, +}; +Response response = client.CreateTask("jobId", task); +]]> +This sample shows how to call CreateTask. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "bash -c 'echo hello'") +{ + ContainerSettings = new BatchTaskContainerSettings("ubuntu") + { + ContainerHostBatchBindMounts = {new ContainerHostBatchBindMountEntry + { + Source = ContainerHostDataPath.Task, + IsReadOnly = true, + }, new ContainerHostBatchBindMountEntry + { + Source = ContainerHostDataPath.Task, + IsReadOnly = true, + }}, + }, + UserIdentity = new UserIdentity + { + AutoUser = new AutoUserSpecification + { + Scope = AutoUserScope.Task, + ElevationLevel = ElevationLevel.NonAdmin, + }, + }, +}; +Response response = client.CreateTask("jobId", task); +]]> +This sample shows how to call CreateTask. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "cmd /c exit 3") { ExitConditions = new ExitConditions @@ -4256,6 +4450,81 @@ Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); BatchClient client = new BatchClient(endpoint, credential); +using RequestContent content = RequestContent.Create(new +{ + id = "taskId", + commandLine = "bash -c 'echo hello'", + containerSettings = new + { + imageName = "ubuntu", + containerHostBatchBindMounts = new object[] + { + new + { + source = "Task", + isReadOnly = true, + } + }, + }, + userIdentity = new + { + autoUser = new + { + scope = "task", + elevationLevel = "nonadmin", + }, + }, +}); +Response response = await client.CreateTaskAsync("jobId", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateTaskAsync. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + id = "taskId", + commandLine = "bash -c 'echo hello'", + containerSettings = new + { + imageName = "ubuntu", + containerHostBatchBindMounts = new object[] + { + new + { + source = "Task", + isReadOnly = true, + }, + new + { + source = "Task", + isReadOnly = true, + } + }, + }, + userIdentity = new + { + autoUser = new + { + scope = "task", + elevationLevel = "nonadmin", + }, + }, +}); +Response response = await client.CreateTaskAsync("jobId", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateTaskAsync. 
+"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + using RequestContent content = RequestContent.Create(new { id = "taskId", @@ -4356,6 +4625,81 @@ Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); BatchClient client = new BatchClient(endpoint, credential); +using RequestContent content = RequestContent.Create(new +{ + id = "taskId", + commandLine = "bash -c 'echo hello'", + containerSettings = new + { + imageName = "ubuntu", + containerHostBatchBindMounts = new object[] + { + new + { + source = "Task", + isReadOnly = true, + } + }, + }, + userIdentity = new + { + autoUser = new + { + scope = "task", + elevationLevel = "nonadmin", + }, + }, +}); +Response response = client.CreateTask("jobId", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateTask. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + id = "taskId", + commandLine = "bash -c 'echo hello'", + containerSettings = new + { + imageName = "ubuntu", + containerHostBatchBindMounts = new object[] + { + new + { + source = "Task", + isReadOnly = true, + }, + new + { + source = "Task", + isReadOnly = true, + } + }, + }, + userIdentity = new + { + autoUser = new + { + scope = "task", + elevationLevel = "nonadmin", + }, + }, +}); +Response response = client.CreateTask("jobId", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateTask. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + using RequestContent content = RequestContent.Create(new { id = "taskId", @@ -5282,6 +5626,132 @@ BatchClient client = new BatchClient(endpoint, credential); using RequestContent content = null; Response response = client.RebootNode("poolId", "tvm-1695681911_1-20161122t193202z", content); +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call StartNodeAsync. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +Response response = await client.StartNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call StartNode. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +Response response = client.StartNode("poolId", "tvm-1695681911_1-20161122t193202z"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call ReimageNodeAsync. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +Response response = await client.ReimageNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z"); +]]> + + + +This sample shows how to call ReimageNode. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +Response response = client.ReimageNode("poolId", "tvm-1695681911_1-20161122t193202z"); +]]> + + + +This sample shows how to call ReimageNodeAsync. 
+"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +using RequestContent content = null; +Response response = await client.ReimageNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call ReimageNode. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +using RequestContent content = null; +Response response = client.ReimageNode("poolId", "tvm-1695681911_1-20161122t193202z", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeallocateNodeAsync. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +Response response = await client.DeallocateNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z"); +]]> + + + +This sample shows how to call DeallocateNode. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +Response response = client.DeallocateNode("poolId", "tvm-1695681911_1-20161122t193202z"); +]]> + + + +This sample shows how to call DeallocateNodeAsync. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +using RequestContent content = null; +Response response = await client.DeallocateNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeallocateNode. +"); +TokenCredential credential = new DefaultAzureCredential(); +BatchClient client = new BatchClient(endpoint, credential); + +using RequestContent content = null; +Response response = client.DeallocateNode("poolId", "tvm-1695681911_1-20161122t193202z", content); + Console.WriteLine(response.Status); ]]> diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/EnvironmentSetting.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/EnvironmentSetting.Serialization.cs index 5394104578495..2012b1546cc14 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/EnvironmentSetting.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/EnvironmentSetting.Serialization.cs @@ -18,6 +18,15 @@ public partial class EnvironmentSetting : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(EnvironmentSetting)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("name"u8); writer.WriteStringValue(Name); if (Optional.IsDefined(Value)) @@ -48,7 +56,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit #endif } } - writer.WriteEndObject(); } EnvironmentSetting IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ExitCodeMapping.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ExitCodeMapping.Serialization.cs index 9ec340db7bde9..6740fcaab90d1 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ExitCodeMapping.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ExitCodeMapping.Serialization.cs @@ -18,6 +18,15 @@ public partial class ExitCodeMapping : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO throw new FormatException($"The model {nameof(ExitCodeMapping)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("code"u8); writer.WriteNumberValue(Code); writer.WritePropertyName("exitOptions"u8); @@ -45,7 +53,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO #endif } } - writer.WriteEndObject(); } ExitCodeMapping IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ExitCodeRangeMapping.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ExitCodeRangeMapping.Serialization.cs index d7ee046886d24..d4920fddc4996 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ExitCodeRangeMapping.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ExitCodeRangeMapping.Serialization.cs @@ -18,6 +18,15 @@ public partial class ExitCodeRangeMapping : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr throw new FormatException($"The model {nameof(ExitCodeRangeMapping)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("start"u8); writer.WriteNumberValue(Start); writer.WritePropertyName("end"u8); @@ -47,7 +55,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr #endif } } - writer.WriteEndObject(); } ExitCodeRangeMapping IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ExitConditions.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ExitConditions.Serialization.cs index 68222969f5684..69472567f8fee 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ExitConditions.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ExitConditions.Serialization.cs @@ -18,6 +18,15 @@ public partial class ExitConditions : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp throw new FormatException($"The model {nameof(ExitConditions)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsCollectionDefined(ExitCodes)) { writer.WritePropertyName("exitCodes"u8); @@ -76,7 +84,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp #endif } } - writer.WriteEndObject(); } ExitConditions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ExitOptions.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ExitOptions.Serialization.cs index 6c8e87057cb4e..697b8fe663bea 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ExitOptions.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ExitOptions.Serialization.cs @@ -18,6 +18,15 @@ public partial class ExitOptions : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio throw new FormatException($"The model {nameof(ExitOptions)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(JobAction)) { writer.WritePropertyName("jobAction"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio #endif } } - writer.WriteEndObject(); } ExitOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/FileProperties.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/FileProperties.Serialization.cs index b0ab715bf8ca1..e55837402ccac 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/FileProperties.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/FileProperties.Serialization.cs @@ -18,6 +18,15 @@ public partial class FileProperties : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp throw new FormatException($"The model {nameof(FileProperties)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(CreationTime)) { writer.WritePropertyName("creationTime"u8); @@ -34,7 +42,7 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp writer.WritePropertyName("lastModified"u8); writer.WriteStringValue(LastModified, "O"); writer.WritePropertyName("contentLength"u8); - writer.WriteNumberValue(ContentLength); + writer.WriteStringValue(ContentLength); if (Optional.IsDefined(ContentType)) { writer.WritePropertyName("contentType"u8); @@ -60,7 +68,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp #endif } } - writer.WriteEndObject(); } FileProperties IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) @@ -85,7 +92,7 @@ internal static FileProperties DeserializeFileProperties(JsonElement element, Mo } DateTimeOffset? 
creationTime = default; DateTimeOffset lastModified = default; - long contentLength = default; + string contentLength = default; string contentType = default; string fileMode = default; IDictionary serializedAdditionalRawData = default; @@ -108,7 +115,7 @@ internal static FileProperties DeserializeFileProperties(JsonElement element, Mo } if (property.NameEquals("contentLength"u8)) { - contentLength = property.Value.GetInt64(); + contentLength = property.Value.GetString(); continue; } if (property.NameEquals("contentType"u8)) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/FileProperties.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/FileProperties.cs index 62a026bccce26..112b2425a7765 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/FileProperties.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/FileProperties.cs @@ -48,8 +48,11 @@ public partial class FileProperties /// Initializes a new instance of . /// The time at which the file was last modified. /// The length of the file. - internal FileProperties(DateTimeOffset lastModified, long contentLength) + /// is null. + internal FileProperties(DateTimeOffset lastModified, string contentLength) { + Argument.AssertNotNull(contentLength, nameof(contentLength)); + LastModified = lastModified; ContentLength = contentLength; } @@ -61,7 +64,7 @@ internal FileProperties(DateTimeOffset lastModified, long contentLength) /// The content type of the file. /// The file mode attribute in octal format. The file mode is returned only for files on Linux Compute Nodes. /// Keeps track of any properties unknown to the library. - internal FileProperties(DateTimeOffset? creationTime, DateTimeOffset lastModified, long contentLength, string contentType, string fileMode, IDictionary serializedAdditionalRawData) + internal FileProperties(DateTimeOffset? creationTime, DateTimeOffset lastModified, string contentLength, string contentType, string fileMode, IDictionary serializedAdditionalRawData) { CreationTime = creationTime; LastModified = lastModified; @@ -81,7 +84,7 @@ internal FileProperties() /// The time at which the file was last modified. public DateTimeOffset LastModified { get; } /// The length of the file. - public long ContentLength { get; } + public string ContentLength { get; } /// The content type of the file. public string ContentType { get; } /// The file mode attribute in octal format. The file mode is returned only for files on Linux Compute Nodes. diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/HttpHeader.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/HttpHeader.Serialization.cs index 09ded47c08e7b..f731ddde841fb 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/HttpHeader.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/HttpHeader.Serialization.cs @@ -18,6 +18,15 @@ public partial class HttpHeader : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOption throw new FormatException($"The model {nameof(HttpHeader)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("name"u8); writer.WriteStringValue(Name); if (Optional.IsDefined(Value)) @@ -48,7 +56,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOption #endif } } - writer.WriteEndObject(); } HttpHeader IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ImageReference.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ImageReference.Serialization.cs index 117fab510ee67..ba2303bceac3d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ImageReference.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ImageReference.Serialization.cs @@ -18,6 +18,15 @@ public partial class ImageReference : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp throw new FormatException($"The model {nameof(ImageReference)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Publisher)) { writer.WritePropertyName("publisher"u8); @@ -56,6 +64,16 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp writer.WritePropertyName("exactVersion"u8); writer.WriteStringValue(ExactVersion); } + if (Optional.IsDefined(SharedGalleryImageId)) + { + writer.WritePropertyName("sharedGalleryImageId"u8); + writer.WriteStringValue(SharedGalleryImageId); + } + if (Optional.IsDefined(CommunityGalleryImageId)) + { + writer.WritePropertyName("communityGalleryImageId"u8); + writer.WriteStringValue(CommunityGalleryImageId); + } if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -71,7 +89,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp #endif } } - writer.WriteEndObject(); } ImageReference IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) @@ -100,6 +117,8 @@ internal static ImageReference DeserializeImageReference(JsonElement element, Mo string version = default; string virtualMachineImageId = default; string exactVersion = default; + string sharedGalleryImageId = default; + string communityGalleryImageId = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -134,6 +153,16 @@ internal static ImageReference DeserializeImageReference(JsonElement element, Mo exactVersion = property.Value.GetString(); continue; } + if (property.NameEquals("sharedGalleryImageId"u8)) + { + sharedGalleryImageId = property.Value.GetString(); + continue; + } + if 
(property.NameEquals("communityGalleryImageId"u8)) + { + communityGalleryImageId = property.Value.GetString(); + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); @@ -147,6 +176,8 @@ internal static ImageReference DeserializeImageReference(JsonElement element, Mo version, virtualMachineImageId, exactVersion, + sharedGalleryImageId, + communityGalleryImageId, serializedAdditionalRawData); } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ImageReference.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ImageReference.cs index 6c5cb5fc883f4..caabb92b99f85 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ImageReference.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ImageReference.cs @@ -61,8 +61,10 @@ public ImageReference() /// The version of the Azure Virtual Machines Marketplace Image. A value of 'latest' can be specified to select the latest version of an Image. If omitted, the default is 'latest'. /// The ARM resource identifier of the Azure Compute Gallery Image. Compute Nodes in the Pool will be created using this Image Id. This is of the form /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Compute/galleries/{galleryName}/images/{imageDefinitionName}/versions/{VersionId} or /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Compute/galleries/{galleryName}/images/{imageDefinitionName} for always defaulting to the latest image version. This property is mutually exclusive with other ImageReference properties. The Azure Compute Gallery Image must have replicas in the same region and must be in the same subscription as the Azure Batch account. If the image version is not specified in the imageId, the latest version will be used. For information about the firewall settings for the Batch Compute Node agent to communicate with the Batch service see https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. /// The specific version of the platform image or marketplace image used to create the node. This read-only field differs from 'version' only if the value specified for 'version' when the pool was created was 'latest'. + /// The shared gallery image unique identifier. This property is mutually exclusive with other properties and can be fetched from shared gallery image GET call. + /// The community gallery image unique identifier. This property is mutually exclusive with other properties and can be fetched from community gallery image GET call. /// Keeps track of any properties unknown to the library. 
- internal ImageReference(string publisher, string offer, string sku, string version, string virtualMachineImageId, string exactVersion, IDictionary serializedAdditionalRawData) + internal ImageReference(string publisher, string offer, string sku, string version, string virtualMachineImageId, string exactVersion, string sharedGalleryImageId, string communityGalleryImageId, IDictionary serializedAdditionalRawData) { Publisher = publisher; Offer = offer; @@ -70,6 +72,8 @@ internal ImageReference(string publisher, string offer, string sku, string versi Version = version; VirtualMachineImageId = virtualMachineImageId; ExactVersion = exactVersion; + SharedGalleryImageId = sharedGalleryImageId; + CommunityGalleryImageId = communityGalleryImageId; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -85,5 +89,9 @@ internal ImageReference(string publisher, string offer, string sku, string versi public string VirtualMachineImageId { get; set; } /// The specific version of the platform image or marketplace image used to create the node. This read-only field differs from 'version' only if the value specified for 'version' when the pool was created was 'latest'. public string ExactVersion { get; } + /// The shared gallery image unique identifier. This property is mutually exclusive with other properties and can be fetched from shared gallery image GET call. + public string SharedGalleryImageId { get; set; } + /// The community gallery image unique identifier. This property is mutually exclusive with other properties and can be fetched from community gallery image GET call. + public string CommunityGalleryImageId { get; set; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/InboundEndpoint.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/InboundEndpoint.Serialization.cs index 1601adf208238..c961b2e36b2d5 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/InboundEndpoint.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/InboundEndpoint.Serialization.cs @@ -18,6 +18,15 @@ public partial class InboundEndpoint : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO throw new FormatException($"The model {nameof(InboundEndpoint)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("name"u8); writer.WriteStringValue(Name); writer.WritePropertyName("protocol"u8); @@ -53,7 +61,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO #endif } } - writer.WriteEndObject(); } InboundEndpoint IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/InboundNatPool.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/InboundNatPool.Serialization.cs index 596bce51442d3..51399fb97177d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/InboundNatPool.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/InboundNatPool.Serialization.cs @@ -18,6 +18,15 @@ public partial class InboundNatPool : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp throw new FormatException($"The model {nameof(InboundNatPool)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("name"u8); writer.WriteStringValue(Name); writer.WritePropertyName("protocol"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp #endif } } - writer.WriteEndObject(); } InboundNatPool IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/InstanceViewStatus.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/InstanceViewStatus.Serialization.cs index 1d458a88f409e..ee7413989a89e 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/InstanceViewStatus.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/InstanceViewStatus.Serialization.cs @@ -18,6 +18,15 @@ public partial class InstanceViewStatus : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(InstanceViewStatus)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Code)) { writer.WritePropertyName("code"u8); @@ -66,7 +74,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit #endif } } - writer.WriteEndObject(); } InstanceViewStatus IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/LinuxUserConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/LinuxUserConfiguration.Serialization.cs index bb92943b03125..c98e52d651321 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/LinuxUserConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/LinuxUserConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class LinuxUserConfiguration : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(LinuxUserConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Uid)) { writer.WritePropertyName("uid"u8); @@ -56,7 +64,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } LinuxUserConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ManagedDisk.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ManagedDisk.Serialization.cs index b3cd49d9cb82b..f94ca42197905 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ManagedDisk.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ManagedDisk.Serialization.cs @@ -18,6 +18,15 @@ public partial class ManagedDisk : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,9 +34,16 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio throw new FormatException($"The model {nameof(ManagedDisk)} does not support writing '{format}' format."); } - writer.WriteStartObject(); - writer.WritePropertyName("storageAccountType"u8); - writer.WriteStringValue(StorageAccountType.ToString()); + if (Optional.IsDefined(StorageAccountType)) + { + writer.WritePropertyName("storageAccountType"u8); + writer.WriteStringValue(StorageAccountType.Value.ToString()); + } + if (Optional.IsDefined(SecurityProfile)) + { + writer.WritePropertyName("securityProfile"u8); + writer.WriteObjectValue(SecurityProfile, options); + } if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -43,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio #endif } } - writer.WriteEndObject(); } ManagedDisk IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) @@ -66,23 +81,37 @@ internal static ManagedDisk DeserializeManagedDisk(JsonElement element, ModelRea { return null; } - StorageAccountType storageAccountType = default; + StorageAccountType? storageAccountType = default; + VMDiskSecurityProfile securityProfile = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) { if (property.NameEquals("storageAccountType"u8)) { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } storageAccountType = new StorageAccountType(property.Value.GetString()); continue; } + if (property.NameEquals("securityProfile"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + securityProfile = VMDiskSecurityProfile.DeserializeVMDiskSecurityProfile(property.Value, options); + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } } serializedAdditionalRawData = rawDataDictionary; - return new ManagedDisk(storageAccountType, serializedAdditionalRawData); + return new ManagedDisk(storageAccountType, securityProfile, serializedAdditionalRawData); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ManagedDisk.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ManagedDisk.cs index f8ada07e8f729..32a2f0e6cc65b 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ManagedDisk.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ManagedDisk.cs @@ -46,27 +46,24 @@ public partial class ManagedDisk private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The storage account type for managed disk. - public ManagedDisk(StorageAccountType storageAccountType) + public ManagedDisk() { - StorageAccountType = storageAccountType; } /// Initializes a new instance of . /// The storage account type for managed disk. + /// Specifies the security profile settings for the managed disk. /// Keeps track of any properties unknown to the library. - internal ManagedDisk(StorageAccountType storageAccountType, IDictionary serializedAdditionalRawData) + internal ManagedDisk(StorageAccountType? 
storageAccountType, VMDiskSecurityProfile securityProfile, IDictionary serializedAdditionalRawData) { StorageAccountType = storageAccountType; + SecurityProfile = securityProfile; _serializedAdditionalRawData = serializedAdditionalRawData; } - /// Initializes a new instance of for deserialization. - internal ManagedDisk() - { - } - /// The storage account type for managed disk. - public StorageAccountType StorageAccountType { get; set; } + public StorageAccountType? StorageAccountType { get; set; } + /// Specifies the security profile settings for the managed disk. + public VMDiskSecurityProfile SecurityProfile { get; set; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/MetadataItem.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/MetadataItem.Serialization.cs index 424b946c9ac81..a24204f32fb5e 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/MetadataItem.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/MetadataItem.Serialization.cs @@ -18,6 +18,15 @@ public partial class MetadataItem : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti throw new FormatException($"The model {nameof(MetadataItem)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("name"u8); writer.WriteStringValue(Name); writer.WritePropertyName("value"u8); @@ -45,7 +53,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti #endif } } - writer.WriteEndObject(); } MetadataItem IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/MountConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/MountConfiguration.Serialization.cs index 387392062b56c..e1f157e3c3614 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/MountConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/MountConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class MountConfiguration : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(MountConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(AzureBlobFileSystemConfiguration)) { writer.WritePropertyName("azureBlobFileSystemConfiguration"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit #endif } } - writer.WriteEndObject(); } MountConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/MultiInstanceSettings.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/MultiInstanceSettings.Serialization.cs index 1b72767d544d7..6303635b5a717 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/MultiInstanceSettings.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/MultiInstanceSettings.Serialization.cs @@ -18,6 +18,15 @@ public partial class MultiInstanceSettings : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(MultiInstanceSettings)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(NumberOfInstances)) { writer.WritePropertyName("numberOfInstances"u8); @@ -58,7 +66,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } MultiInstanceSettings IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/NameValuePair.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/NameValuePair.Serialization.cs index df9413cc5c250..7afdc4b31d318 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/NameValuePair.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/NameValuePair.Serialization.cs @@ -18,6 +18,15 @@ public partial class NameValuePair : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt throw new FormatException($"The model {nameof(NameValuePair)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Name)) { writer.WritePropertyName("name"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt #endif } } - writer.WriteEndObject(); } NameValuePair IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkConfiguration.Serialization.cs index 36ede0f652592..ff7fdf1adba43 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class NetworkConfiguration : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr throw new FormatException($"The model {nameof(NetworkConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(SubnetId)) { writer.WritePropertyName("subnetId"u8); @@ -66,7 +74,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr #endif } } - writer.WriteEndObject(); } NetworkConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkConfiguration.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkConfiguration.cs index e69a292a35016..9c31a68578661 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkConfiguration.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkConfiguration.cs @@ -51,10 +51,10 @@ public NetworkConfiguration() } /// Initializes a new instance of . - /// The ARM resource identifier of the virtual network subnet which the Compute Nodes of the Pool will join. This is of the form /subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes in the Pool. If the subnet doesn't have enough free IP addresses, the Pool will partially allocate Nodes and a resize error will occur. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet. The specified subnet must allow communication from the Azure Batch service to be able to schedule Tasks on the Nodes. 
This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. For Pools created with virtualMachineConfiguration only ARM virtual networks ('Microsoft.Network/virtualNetworks') are supported. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication. For Pools created with a virtual machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Also enable outbound connections to Azure Storage on port 443. For more details see: https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. + /// The ARM resource identifier of the virtual network subnet which the Compute Nodes of the Pool will join. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes in the Pool. If the subnet doesn't have enough free IP addresses, the Pool will partially allocate Nodes and a resize error will occur. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet. The specified subnet must allow communication from the Azure Batch service to be able to schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. Only ARM virtual networks ('Microsoft.Network/virtualNetworks') are supported. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication. Enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Also enable outbound connections to Azure Storage on port 443. For more details see: https://docs.microsoft.com/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. /// The scope of dynamic vnet assignment. - /// The configuration for endpoints on Compute Nodes in the Batch Pool. Pool endpoint configuration is only supported on Pools with the virtualMachineConfiguration property. - /// The Public IPAddress configuration for Compute Nodes in the Batch Pool. Public IP configuration property is only supported on Pools with the virtualMachineConfiguration property. + /// The configuration for endpoints on Compute Nodes in the Batch Pool. + /// The Public IPAddress configuration for Compute Nodes in the Batch Pool. /// Whether this pool should enable accelerated networking. Accelerated networking enables single root I/O virtualization (SR-IOV) to a VM, which may lead to improved networking performance. For more details, see: https://learn.microsoft.com/azure/virtual-network/accelerated-networking-overview. /// Keeps track of any properties unknown to the library. internal NetworkConfiguration(string subnetId, DynamicVNetAssignmentScope? dynamicVNetAssignmentScope, BatchPoolEndpointConfiguration endpointConfiguration, PublicIpAddressConfiguration publicIpAddressConfiguration, bool? 
enableAcceleratedNetworking, IDictionary serializedAdditionalRawData) @@ -67,13 +67,13 @@ internal NetworkConfiguration(string subnetId, DynamicVNetAssignmentScope? dynam _serializedAdditionalRawData = serializedAdditionalRawData; } - /// The ARM resource identifier of the virtual network subnet which the Compute Nodes of the Pool will join. This is of the form /subscriptions/{subscription}/resourceGroups/{group}/providers/{provider}/virtualNetworks/{network}/subnets/{subnet}. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes in the Pool. If the subnet doesn't have enough free IP addresses, the Pool will partially allocate Nodes and a resize error will occur. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet. The specified subnet must allow communication from the Azure Batch service to be able to schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. For Pools created with virtualMachineConfiguration only ARM virtual networks ('Microsoft.Network/virtualNetworks') are supported. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication. For Pools created with a virtual machine configuration, enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Also enable outbound connections to Azure Storage on port 443. For more details see: https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. + /// The ARM resource identifier of the virtual network subnet which the Compute Nodes of the Pool will join. The virtual network must be in the same region and subscription as the Azure Batch Account. The specified subnet should have enough free IP addresses to accommodate the number of Compute Nodes in the Pool. If the subnet doesn't have enough free IP addresses, the Pool will partially allocate Nodes and a resize error will occur. The 'MicrosoftAzureBatch' service principal must have the 'Classic Virtual Machine Contributor' Role-Based Access Control (RBAC) role for the specified VNet. The specified subnet must allow communication from the Azure Batch service to be able to schedule Tasks on the Nodes. This can be verified by checking if the specified VNet has any associated Network Security Groups (NSG). If communication to the Nodes in the specified subnet is denied by an NSG, then the Batch service will set the state of the Compute Nodes to unusable. Only ARM virtual networks ('Microsoft.Network/virtualNetworks') are supported. If the specified VNet has any associated Network Security Groups (NSG), then a few reserved system ports must be enabled for inbound communication. Enable ports 29876 and 29877, as well as port 22 for Linux and port 3389 for Windows. Also enable outbound connections to Azure Storage on port 443. For more details see: https://docs.microsoft.com/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration. public string SubnetId { get; set; } /// The scope of dynamic vnet assignment. public DynamicVNetAssignmentScope? 
DynamicVNetAssignmentScope { get; set; } - /// The configuration for endpoints on Compute Nodes in the Batch Pool. Pool endpoint configuration is only supported on Pools with the virtualMachineConfiguration property. + /// The configuration for endpoints on Compute Nodes in the Batch Pool. public BatchPoolEndpointConfiguration EndpointConfiguration { get; set; } - /// The Public IPAddress configuration for Compute Nodes in the Batch Pool. Public IP configuration property is only supported on Pools with the virtualMachineConfiguration property. + /// The Public IPAddress configuration for Compute Nodes in the Batch Pool. public PublicIpAddressConfiguration PublicIpAddressConfiguration { get; set; } /// Whether this pool should enable accelerated networking. Accelerated networking enables single root I/O virtualization (SR-IOV) to a VM, which may lead to improved networking performance. For more details, see: https://learn.microsoft.com/azure/virtual-network/accelerated-networking-overview. public bool? EnableAcceleratedNetworking { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkSecurityGroupRule.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkSecurityGroupRule.Serialization.cs index efdb0b503e6f5..cd12b8f43a7f0 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkSecurityGroupRule.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/NetworkSecurityGroupRule.Serialization.cs @@ -18,6 +18,15 @@ public partial class NetworkSecurityGroupRule : IUtf8JsonSerializable, IJsonMode void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead throw new FormatException($"The model {nameof(NetworkSecurityGroupRule)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("priority"u8); writer.WriteNumberValue(Priority); writer.WritePropertyName("access"u8); @@ -57,7 +65,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead #endif } } - writer.WriteEndObject(); } NetworkSecurityGroupRule IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/NfsMountConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/NfsMountConfiguration.Serialization.cs index 653d3f6cd0a12..bc4ba0ddb9d0d 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/NfsMountConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/NfsMountConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class NfsMountConfiguration : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. 
+ /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(NfsMountConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("source"u8); writer.WriteStringValue(Source); writer.WritePropertyName("relativeMountPath"u8); @@ -50,7 +58,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } NfsMountConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/OSDisk.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/OSDisk.Serialization.cs index 02a6a6a39c0d8..d99ba138078bf 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/OSDisk.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/OSDisk.Serialization.cs @@ -18,6 +18,15 @@ public partial class OSDisk : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions op throw new FormatException($"The model {nameof(OSDisk)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(EphemeralOSDiskSettings)) { writer.WritePropertyName("ephemeralOSDiskSettings"u8); @@ -66,7 +74,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions op #endif } } - writer.WriteEndObject(); } OSDisk IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFile.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFile.Serialization.cs index 070e9004d0dbc..f8a2ea2f6680c 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFile.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFile.Serialization.cs @@ -18,6 +18,15 @@ public partial class OutputFile : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOption throw new FormatException($"The model {nameof(OutputFile)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("filePattern"u8); writer.WriteStringValue(FilePattern); writer.WritePropertyName("destination"u8); @@ -47,7 +55,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOption #endif } } - writer.WriteEndObject(); } OutputFile IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileBlobContainerDestination.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileBlobContainerDestination.Serialization.cs index 059dd2156abbd..73c24309b8c98 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileBlobContainerDestination.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileBlobContainerDestination.Serialization.cs @@ -18,6 +18,15 @@ public partial class OutputFileBlobContainerDestination : IUtf8JsonSerializable, void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, throw new FormatException($"The model {nameof(OutputFileBlobContainerDestination)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Path)) { writer.WritePropertyName("path"u8); @@ -63,7 +71,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, #endif } } - writer.WriteEndObject(); } OutputFileBlobContainerDestination IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileBlobContainerDestination.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileBlobContainerDestination.cs index 44d0f78eef822..ebdbf6465e3ab 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileBlobContainerDestination.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileBlobContainerDestination.cs @@ -60,7 +60,7 @@ public OutputFileBlobContainerDestination(string containerUrl) /// The destination blob or virtual directory within the Azure Storage container. If filePattern refers to a specific file (i.e. contains no wildcards), then path is the name of the blob to which to upload that file. If filePattern contains one or more wildcards (and therefore may match multiple files), then path is the name of the blob virtual directory (which is prepended to each blob name) to which to upload the file(s). If omitted, file(s) are uploaded to the root of the container with a blob name matching their file name. /// The URL of the container within Azure Blob Storage to which to upload the file(s). 
If not using a managed identity, the URL must include a Shared Access Signature (SAS) granting write permissions to the container. /// The reference to the user assigned identity to use to access Azure Blob Storage specified by containerUrl. The identity must have write access to the Azure Blob Storage container. - /// A list of name-value pairs for headers to be used in uploading output files. These headers will be specified when uploading files to Azure Storage. Official document on allowed headers when uploading blobs: https://docs.microsoft.com/en-us/rest/api/storageservices/put-blob#request-headers-all-blob-types. + /// A list of name-value pairs for headers to be used in uploading output files. These headers will be specified when uploading files to Azure Storage. Official document on allowed headers when uploading blobs: https://docs.microsoft.com/rest/api/storageservices/put-blob#request-headers-all-blob-types. /// Keeps track of any properties unknown to the library. internal OutputFileBlobContainerDestination(string path, string containerUrl, BatchNodeIdentityReference identityReference, IList uploadHeaders, IDictionary serializedAdditionalRawData) { @@ -82,7 +82,7 @@ internal OutputFileBlobContainerDestination() public string ContainerUrl { get; set; } /// The reference to the user assigned identity to use to access Azure Blob Storage specified by containerUrl. The identity must have write access to the Azure Blob Storage container. public BatchNodeIdentityReference IdentityReference { get; set; } - /// A list of name-value pairs for headers to be used in uploading output files. These headers will be specified when uploading files to Azure Storage. Official document on allowed headers when uploading blobs: https://docs.microsoft.com/en-us/rest/api/storageservices/put-blob#request-headers-all-blob-types. + /// A list of name-value pairs for headers to be used in uploading output files. These headers will be specified when uploading files to Azure Storage. Official document on allowed headers when uploading blobs: https://docs.microsoft.com/rest/api/storageservices/put-blob#request-headers-all-blob-types. public IList UploadHeaders { get; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileDestination.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileDestination.Serialization.cs index 6f34ab1cfc7ec..d8507e40658fb 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileDestination.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileDestination.Serialization.cs @@ -18,6 +18,15 @@ public partial class OutputFileDestination : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW throw new FormatException($"The model {nameof(OutputFileDestination)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Container)) { writer.WritePropertyName("container"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderW #endif } } - writer.WriteEndObject(); } OutputFileDestination IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileUploadConfig.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileUploadConfig.Serialization.cs index d5204fa3cb798..2d10e7f0df5a6 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileUploadConfig.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/OutputFileUploadConfig.Serialization.cs @@ -18,6 +18,15 @@ public partial class OutputFileUploadConfig : IUtf8JsonSerializable, IJsonModel< void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader throw new FormatException($"The model {nameof(OutputFileUploadConfig)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("uploadCondition"u8); writer.WriteStringValue(UploadCondition.ToString()); if (options.Format != "W" && _serializedAdditionalRawData != null) @@ -43,7 +51,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader #endif } } - writer.WriteEndObject(); } OutputFileUploadConfig IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/PublicIpAddressConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/PublicIpAddressConfiguration.Serialization.cs index e98eca4d6d31a..3518a3b50f4d4 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/PublicIpAddressConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/PublicIpAddressConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class PublicIpAddressConfiguration : IUtf8JsonSerializable, IJson void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model throw new FormatException($"The model {nameof(PublicIpAddressConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(IpAddressProvisioningType)) { writer.WritePropertyName("provision"u8); @@ -56,7 +64,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model #endif } } - writer.WriteEndObject(); } PublicIpAddressConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/RecentBatchJob.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/RecentBatchJob.Serialization.cs index 2d448fb013eeb..b5a03b8a4cdb8 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/RecentBatchJob.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/RecentBatchJob.Serialization.cs @@ -18,6 +18,15 @@ public partial class RecentBatchJob : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp throw new FormatException($"The model {nameof(RecentBatchJob)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOp #endif } } - writer.WriteEndObject(); } RecentBatchJob IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ResizeError.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ResizeError.Serialization.cs index c65fd7a6c0c36..b17acb59b28d7 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ResizeError.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ResizeError.Serialization.cs @@ -18,6 +18,15 @@ public partial class ResizeError : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio throw new FormatException($"The model {nameof(ResizeError)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Code)) { writer.WritePropertyName("code"u8); @@ -61,7 +69,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio #endif } } - writer.WriteEndObject(); } ResizeError IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ResourceFile.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ResourceFile.Serialization.cs index 324491f6f9266..dd2fdaea9842e 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ResourceFile.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ResourceFile.Serialization.cs @@ -18,6 +18,15 @@ public partial class ResourceFile : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti throw new FormatException($"The model {nameof(ResourceFile)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(AutoStorageContainerName)) { writer.WritePropertyName("autoStorageContainerName"u8); @@ -76,7 +84,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti #endif } } - writer.WriteEndObject(); } ResourceFile IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/RollingUpgradePolicy.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/RollingUpgradePolicy.Serialization.cs index 09b39011e43ba..49439ac1239be 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/RollingUpgradePolicy.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/RollingUpgradePolicy.Serialization.cs @@ -18,6 +18,15 @@ public partial class RollingUpgradePolicy : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr throw new FormatException($"The model {nameof(RollingUpgradePolicy)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(EnableCrossZoneUpgrade)) { writer.WritePropertyName("enableCrossZoneUpgrade"u8); @@ -76,7 +84,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr #endif } } - writer.WriteEndObject(); } RollingUpgradePolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityEncryptionTypes.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityEncryptionTypes.cs new file mode 100644 index 0000000000000..2ab799148d359 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityEncryptionTypes.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Compute.Batch +{ + /// SecurityEncryptionTypes enums. + public readonly partial struct SecurityEncryptionTypes : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public SecurityEncryptionTypes(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NonPersistedTPMValue = "NonPersistedTPM"; + private const string VMGuestStateOnlyValue = "VMGuestStateOnly"; + + /// NonPersistedTPM. + public static SecurityEncryptionTypes NonPersistedTPM { get; } = new SecurityEncryptionTypes(NonPersistedTPMValue); + /// VMGuestStateOnly. + public static SecurityEncryptionTypes VMGuestStateOnly { get; } = new SecurityEncryptionTypes(VMGuestStateOnlyValue); + /// Determines if two values are the same. + public static bool operator ==(SecurityEncryptionTypes left, SecurityEncryptionTypes right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(SecurityEncryptionTypes left, SecurityEncryptionTypes right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator SecurityEncryptionTypes(string value) => new SecurityEncryptionTypes(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is SecurityEncryptionTypes other && Equals(other); + /// + public bool Equals(SecurityEncryptionTypes other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityProfile.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityProfile.Serialization.cs index 14ea5a55a6077..fd12184d9bd7e 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityProfile.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityProfile.Serialization.cs @@ -18,6 +18,15 @@ public partial class SecurityProfile : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO throw new FormatException($"The model {nameof(SecurityProfile)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("encryptionAtHost"u8); writer.WriteBooleanValue(EncryptionAtHost); writer.WritePropertyName("securityType"u8); @@ -47,7 +55,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterO #endif } } - writer.WriteEndObject(); } SecurityProfile IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityProfile.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityProfile.cs index 4f81495d150a0..d34a776e13687 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityProfile.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityProfile.cs @@ -46,7 +46,7 @@ public partial class SecurityProfile private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// This property can be used by user in the request to enable or disable the Host Encryption for the virtual machine or virtual machine scale set. This will enable the encryption for all the disks including Resource/Temp disk at host itself. + /// This property can be used by user in the request to enable or disable the Host Encryption for the virtual machine or virtual machine scale set. This will enable the encryption for all the disks including Resource/Temp disk at host itself. For more information on encryption at host requirements, please refer to https://learn.microsoft.com/azure/virtual-machines/disk-encryption#supported-vm-sizes. /// Specifies the SecurityType of the virtual machine. It has to be set to any specified value to enable UefiSettings. /// Specifies the security settings like secure boot and vTPM used while creating the virtual machine. Specifies the security settings like secure boot and vTPM used while creating the virtual machine. /// is null. @@ -60,7 +60,7 @@ public SecurityProfile(bool encryptionAtHost, SecurityTypes securityType, UefiSe } /// Initializes a new instance of . - /// This property can be used by user in the request to enable or disable the Host Encryption for the virtual machine or virtual machine scale set. 
This will enable the encryption for all the disks including Resource/Temp disk at host itself. + /// This property can be used by user in the request to enable or disable the Host Encryption for the virtual machine or virtual machine scale set. This will enable the encryption for all the disks including Resource/Temp disk at host itself. For more information on encryption at host requirements, please refer to https://learn.microsoft.com/azure/virtual-machines/disk-encryption#supported-vm-sizes. /// Specifies the SecurityType of the virtual machine. It has to be set to any specified value to enable UefiSettings. /// Specifies the security settings like secure boot and vTPM used while creating the virtual machine. Specifies the security settings like secure boot and vTPM used while creating the virtual machine. /// Keeps track of any properties unknown to the library. @@ -77,7 +77,7 @@ internal SecurityProfile() { } - /// This property can be used by user in the request to enable or disable the Host Encryption for the virtual machine or virtual machine scale set. This will enable the encryption for all the disks including Resource/Temp disk at host itself. + /// This property can be used by user in the request to enable or disable the Host Encryption for the virtual machine or virtual machine scale set. This will enable the encryption for all the disks including Resource/Temp disk at host itself. For more information on encryption at host requirements, please refer to https://learn.microsoft.com/azure/virtual-machines/disk-encryption#supported-vm-sizes. public bool EncryptionAtHost { get; set; } /// Specifies the SecurityType of the virtual machine. It has to be set to any specified value to enable UefiSettings. public SecurityTypes SecurityType { get; set; } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityTypes.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityTypes.cs index 699dde2ed57c2..bd2f2b9cd274e 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityTypes.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/SecurityTypes.cs @@ -23,9 +23,12 @@ public SecurityTypes(string value) } private const string TrustedLaunchValue = "trustedLaunch"; + private const string ConfidentialVMValue = "confidentialVM"; /// Trusted launch protects against advanced and persistent attack techniques. public static SecurityTypes TrustedLaunch { get; } = new SecurityTypes(TrustedLaunchValue); + /// Azure confidential computing offers confidential VMs are for tenants with high security and confidentiality requirements. These VMs provide a strong, hardware-enforced boundary to help meet your security needs. You can use confidential VMs for migrations without making changes to your code, with the platform protecting your VM's state from being read or modified. + public static SecurityTypes ConfidentialVM { get; } = new SecurityTypes(ConfidentialVMValue); /// Determines if two values are the same. public static bool operator ==(SecurityTypes left, SecurityTypes right) => left.Equals(right); /// Determines if two values are not the same. 
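To make the recurring change in these generated files easier to follow: each model's explicit IJsonModel<T>.Write implementation now only writes the outer JSON object and delegates the property payload to a new protected virtual JsonModelWriteCore method. That is why the diff removes writer.WriteStartObject()/writer.WriteEndObject() from the old property-writing bodies: the envelope is now written exactly once by the outermost Write, and a derived model can override JsonModelWriteCore, call the base implementation, and append its own properties into the same object. The following is a minimal stand-alone sketch of that idea only. It uses hypothetical BaseModel/DerivedModel types and omits the ModelReaderWriterOptions format check, the additional-raw-data loop, and the IJsonModel<T>/IPersistableModel<T> plumbing the generated code actually implements; it is illustrative, not code from this SDK.

using System;
using System.IO;
using System.Text;
using System.Text.Json;

public class BaseModel
{
    public string Name { get; set; } = "base";

    // Public entry point: writes the object envelope exactly once,
    // then delegates the property payload to the virtual core method.
    public void Write(Utf8JsonWriter writer)
    {
        writer.WriteStartObject();
        JsonModelWriteCore(writer);
        writer.WriteEndObject();
    }

    // Derived types override this to append their own properties.
    protected virtual void JsonModelWriteCore(Utf8JsonWriter writer)
    {
        writer.WriteString("name", Name);
    }
}

public class DerivedModel : BaseModel
{
    public int Extra { get; set; } = 42;

    protected override void JsonModelWriteCore(Utf8JsonWriter writer)
    {
        // Reuse the base payload, then add derived-only properties;
        // no nested StartObject/EndObject is needed here.
        base.JsonModelWriteCore(writer);
        writer.WriteNumber("extra", Extra);
    }
}

public static class Demo
{
    public static void Main()
    {
        using var stream = new MemoryStream();
        using (var writer = new Utf8JsonWriter(stream))
        {
            new DerivedModel().Write(writer);
        }
        // Prints: {"name":"base","extra":42}
        Console.WriteLine(Encoding.UTF8.GetString(stream.ToArray()));
    }
}

In the generated code shown in this diff, the format validation and the _serializedAdditionalRawData loop move into JsonModelWriteCore along with the property writes, which is why only the WriteStartObject/WriteEndObject lines are deleted from the old Write bodies while the rest of each method is left in place.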
diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/ServiceArtifactReference.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/ServiceArtifactReference.Serialization.cs index be9f32cd663bd..33065c17f7a18 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/ServiceArtifactReference.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/ServiceArtifactReference.Serialization.cs @@ -18,6 +18,15 @@ public partial class ServiceArtifactReference : IUtf8JsonSerializable, IJsonMode void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead throw new FormatException($"The model {nameof(ServiceArtifactReference)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("id"u8); writer.WriteStringValue(Id); if (options.Format != "W" && _serializedAdditionalRawData != null) @@ -43,7 +51,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead #endif } } - writer.WriteEndObject(); } ServiceArtifactReference IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/UefiSettings.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/UefiSettings.Serialization.cs index f8c1ab10a99d6..85ea45749668a 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/UefiSettings.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/UefiSettings.Serialization.cs @@ -18,6 +18,15 @@ public partial class UefiSettings : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti throw new FormatException($"The model {nameof(UefiSettings)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(SecureBootEnabled)) { writer.WritePropertyName("secureBootEnabled"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti #endif } } - writer.WriteEndObject(); } UefiSettings IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/UpgradePolicy.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/UpgradePolicy.Serialization.cs index 22e0ce0865d5d..d47f99f6b17a7 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/UpgradePolicy.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/UpgradePolicy.Serialization.cs @@ -18,6 +18,15 @@ public partial class UpgradePolicy : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt throw new FormatException($"The model {nameof(UpgradePolicy)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("mode"u8); writer.WriteStringValue(Mode.ToString()); if (Optional.IsDefined(AutomaticOsUpgradePolicy)) @@ -53,7 +61,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpt #endif } } - writer.WriteEndObject(); } UpgradePolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/UpgradePolicy.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/UpgradePolicy.cs index ff0839122bf21..cb00df2295784 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/UpgradePolicy.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/UpgradePolicy.cs @@ -55,7 +55,7 @@ public UpgradePolicy(UpgradeMode mode) /// Initializes a new instance of . /// Specifies the mode of an upgrade to virtual machines in the scale set.<br /><br /> Possible values are:<br /><br /> **Manual** - You control the application of updates to virtual machines in the scale set. You do this by using the manualUpgrade action.<br /><br /> **Automatic** - All virtual machines in the scale set are automatically updated at the same time.<br /><br /> **Rolling** - Scale set performs updates in batches with an optional pause time in between. /// Configuration parameters used for performing automatic OS Upgrade. The configuration parameters used for performing automatic OS upgrade. - /// The configuration parameters used while performing a rolling upgrade. This property is only supported on Pools with the virtualMachineConfiguration property. + /// The configuration parameters used while performing a rolling upgrade. /// Keeps track of any properties unknown to the library. 
internal UpgradePolicy(UpgradeMode mode, AutomaticOsUpgradePolicy automaticOsUpgradePolicy, RollingUpgradePolicy rollingUpgradePolicy, IDictionary serializedAdditionalRawData) { @@ -74,7 +74,7 @@ internal UpgradePolicy() public UpgradeMode Mode { get; set; } /// Configuration parameters used for performing automatic OS Upgrade. The configuration parameters used for performing automatic OS upgrade. public AutomaticOsUpgradePolicy AutomaticOsUpgradePolicy { get; set; } - /// The configuration parameters used while performing a rolling upgrade. This property is only supported on Pools with the virtualMachineConfiguration property. + /// The configuration parameters used while performing a rolling upgrade. public RollingUpgradePolicy RollingUpgradePolicy { get; set; } } } diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/UploadBatchServiceLogsContent.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/UploadBatchServiceLogsContent.Serialization.cs index 5f9cc8af92622..6b624706e6b45 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/UploadBatchServiceLogsContent.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/UploadBatchServiceLogsContent.Serialization.cs @@ -18,6 +18,15 @@ public partial class UploadBatchServiceLogsContent : IUtf8JsonSerializable, IJso void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode throw new FormatException($"The model {nameof(UploadBatchServiceLogsContent)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("containerUrl"u8); writer.WriteStringValue(ContainerUrl); writer.WritePropertyName("startTime"u8); @@ -55,7 +63,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Mode #endif } } - writer.WriteEndObject(); } UploadBatchServiceLogsContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/UploadBatchServiceLogsResult.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/UploadBatchServiceLogsResult.Serialization.cs index 70bc53481d422..51b2af6bf8776 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/UploadBatchServiceLogsResult.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/UploadBatchServiceLogsResult.Serialization.cs @@ -18,6 +18,15 @@ public partial class UploadBatchServiceLogsResult : IUtf8JsonSerializable, IJson void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model throw new FormatException($"The model {nameof(UploadBatchServiceLogsResult)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("virtualDirectoryName"u8); writer.WriteStringValue(VirtualDirectoryName); writer.WritePropertyName("numberOfFilesUploaded"u8); @@ -45,7 +53,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model #endif } } - writer.WriteEndObject(); } UploadBatchServiceLogsResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/UserAccount.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/UserAccount.Serialization.cs index 47a39b431fc5f..70a000feb2d55 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/UserAccount.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/UserAccount.Serialization.cs @@ -18,6 +18,15 @@ public partial class UserAccount : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio throw new FormatException($"The model {nameof(UserAccount)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("name"u8); writer.WriteStringValue(Name); writer.WritePropertyName("password"u8); @@ -60,7 +68,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio #endif } } - writer.WriteEndObject(); } UserAccount IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/UserAssignedIdentity.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/UserAssignedIdentity.Serialization.cs index 33b89d5ad5ec1..ac1568a889a9f 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/UserAssignedIdentity.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/UserAssignedIdentity.Serialization.cs @@ -18,6 +18,15 @@ public partial class UserAssignedIdentity : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr throw new FormatException($"The model {nameof(UserAssignedIdentity)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("resourceId"u8); writer.WriteStringValue(ResourceId); if (options.Format != "W" && Optional.IsDefined(ClientId)) @@ -53,7 +61,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr #endif } } - writer.WriteEndObject(); } UserAssignedIdentity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/UserIdentity.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/UserIdentity.Serialization.cs index abb58abef495e..b339084557d08 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/UserIdentity.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/UserIdentity.Serialization.cs @@ -18,6 +18,15 @@ public partial class UserIdentity : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti throw new FormatException($"The model {nameof(UserIdentity)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Username)) { writer.WritePropertyName("username"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOpti #endif } } - writer.WriteEndObject(); } UserIdentity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/VMDiskSecurityProfile.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/VMDiskSecurityProfile.Serialization.cs new file mode 100644 index 0000000000000..e9b0fb5d2ed09 --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/VMDiskSecurityProfile.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Compute.Batch +{ + public partial class VMDiskSecurityProfile : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VMDiskSecurityProfile)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(SecurityEncryptionType)) + { + writer.WritePropertyName("securityEncryptionType"u8); + writer.WriteStringValue(SecurityEncryptionType.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VMDiskSecurityProfile IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VMDiskSecurityProfile)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVMDiskSecurityProfile(document.RootElement, options); + } + + internal static VMDiskSecurityProfile DeserializeVMDiskSecurityProfile(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SecurityEncryptionTypes? securityEncryptionType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("securityEncryptionType"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + securityEncryptionType = new SecurityEncryptionTypes(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VMDiskSecurityProfile(securityEncryptionType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VMDiskSecurityProfile)} does not support writing '{options.Format}' format."); + } + } + + VMDiskSecurityProfile IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeVMDiskSecurityProfile(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VMDiskSecurityProfile)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static VMDiskSecurityProfile FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeVMDiskSecurityProfile(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/VMDiskSecurityProfile.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/VMDiskSecurityProfile.cs new file mode 100644 index 0000000000000..2405e8506ee0e --- /dev/null +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/VMDiskSecurityProfile.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Compute.Batch +{ + /// Specifies the security profile settings for the managed disk. **Note**: It can only be set for Confidential VMs and required when using Confidential VMs. + public partial class VMDiskSecurityProfile + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public VMDiskSecurityProfile() + { + } + + /// Initializes a new instance of . + /// Specifies the EncryptionType of the managed disk. It is set to VMGuestStateOnly for encryption of just the VMGuestState blob, and NonPersistedTPM for not persisting firmware state in the VMGuestState blob. **Note**: It can be set for only Confidential VMs and is required when using Confidential VMs. + /// Keeps track of any properties unknown to the library. + internal VMDiskSecurityProfile(SecurityEncryptionTypes? securityEncryptionType, IDictionary serializedAdditionalRawData) + { + SecurityEncryptionType = securityEncryptionType; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Specifies the EncryptionType of the managed disk. It is set to VMGuestStateOnly for encryption of just the VMGuestState blob, and NonPersistedTPM for not persisting firmware state in the VMGuestState blob. **Note**: It can be set for only Confidential VMs and is required when using Confidential VMs. + public SecurityEncryptionTypes? 
SecurityEncryptionType { get; set; } + } +} diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/VMExtension.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/VMExtension.Serialization.cs index 80a26f3037780..6cee9d8559a6c 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/VMExtension.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/VMExtension.Serialization.cs @@ -18,6 +18,15 @@ public partial class VMExtension : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio throw new FormatException($"The model {nameof(VMExtension)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("name"u8); writer.WriteStringValue(Name); writer.WritePropertyName("publisher"u8); @@ -94,7 +102,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptio #endif } } - writer.WriteEndObject(); } VMExtension IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/VMExtensionInstanceView.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/VMExtensionInstanceView.Serialization.cs index 42e2efd6b4cf2..f3c4a523d78d9 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/VMExtensionInstanceView.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/VMExtensionInstanceView.Serialization.cs @@ -18,6 +18,15 @@ public partial class VMExtensionInstanceView : IUtf8JsonSerializable, IJsonModel void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
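// Across these generated serialization files, IJsonModel<T>.Write is split: Write now only owns the
// WriteStartObject/WriteEndObject tokens, while property emission moves into a protected virtual
// JsonModelWriteCore. A derived model can then append its properties to the same JSON object by overriding
// one method instead of re-implementing Write. A standalone sketch of that shape (these two types are
// illustrative only and are not part of the SDK):
using System.ClientModel.Primitives;
using System.Text.Json;

public class IllustrativeBase
{
    public string Name { get; set; }

    // Derived types override this to append their own properties to the same JSON object.
    protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options)
    {
        writer.WritePropertyName("name"u8);
        writer.WriteStringValue(Name);
    }

    public void Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
    {
        writer.WriteStartObject();
        JsonModelWriteCore(writer, options);   // base and derived properties land inside one object
        writer.WriteEndObject();
    }
}

public class IllustrativeDerived : IllustrativeBase
{
    public int Priority { get; set; }

    protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options)
    {
        base.JsonModelWriteCore(writer, options);   // write the base properties first
        writer.WritePropertyName("priority"u8);
        writer.WriteNumberValue(Priority);
    }
}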
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReade throw new FormatException($"The model {nameof(VMExtensionInstanceView)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(Name)) { writer.WritePropertyName("name"u8); @@ -66,7 +74,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReade #endif } } - writer.WriteEndObject(); } VMExtensionInstanceView IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineConfiguration.Serialization.cs index 5a2268329b43a..b525c2aa820b1 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class VirtualMachineConfiguration : IUtf8JsonSerializable, IJsonM void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR throw new FormatException($"The model {nameof(VirtualMachineConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); writer.WritePropertyName("imageReference"u8); writer.WriteObjectValue(ImageReference, options); writer.WritePropertyName("nodeAgentSKUId"u8); @@ -105,7 +113,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelR #endif } } - writer.WriteEndObject(); } VirtualMachineConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineConfiguration.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineConfiguration.cs index 8923d6045e329..0d7b146d86eb8 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineConfiguration.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineConfiguration.cs @@ -67,7 +67,7 @@ public VirtualMachineConfiguration(ImageReference imageReference, string nodeAge /// A reference to the Azure Virtual Machines Marketplace Image or the custom Virtual Machine Image to use. /// The SKU of the Batch Compute Node agent to be provisioned on Compute Nodes in the Pool. The Batch Compute Node agent is a program that runs on each Compute Node in the Pool, and provides the command-and-control interface between the Compute Node and the Batch service. There are different implementations of the Compute Node agent, known as SKUs, for different operating systems. You must specify a Compute Node agent SKU which matches the selected Image reference. To get the list of supported Compute Node agent SKUs along with their list of verified Image references, see the 'List supported Compute Node agent SKUs' operation. 
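// As documented above, the required pieces of a VirtualMachineConfiguration are the image reference and a
// node agent SKU that matches it. A minimal construction sketch, assuming ImageReference exposes settable
// Publisher/Offer/Sku/Version properties (the image and SKU values below are placeholders only):
VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration(
    new ImageReference
    {
        Publisher = "canonical",
        Offer = "0001-com-ubuntu-server-jammy",
        Sku = "22_04-lts",
        Version = "latest",
    },
    "batch.node.ubuntu 22.04");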
/// Windows operating system settings on the virtual machine. This property must not be specified if the imageReference property specifies a Linux OS Image. - /// The configuration for data disks attached to the Compute Nodes in the Pool. This property must be specified if the Compute Nodes in the Pool need to have empty data disks attached to them. This cannot be updated. Each Compute Node gets its own disk (the disk is not a file share). Existing disks cannot be attached, each attached disk is empty. When the Compute Node is removed from the Pool, the disk and all data associated with it is also deleted. The disk is not formatted after being attached, it must be formatted before use - for more information see https://docs.microsoft.com/en-us/azure/virtual-machines/linux/classic/attach-disk#initialize-a-new-data-disk-in-linux and https://docs.microsoft.com/en-us/azure/virtual-machines/windows/attach-disk-ps#add-an-empty-data-disk-to-a-virtual-machine. + /// The configuration for data disks attached to the Compute Nodes in the Pool. This property must be specified if the Compute Nodes in the Pool need to have empty data disks attached to them. This cannot be updated. Each Compute Node gets its own disk (the disk is not a file share). Existing disks cannot be attached, each attached disk is empty. When the Compute Node is removed from the Pool, the disk and all data associated with it is also deleted. The disk is not formatted after being attached, it must be formatted before use - for more information see https://docs.microsoft.com/azure/virtual-machines/linux/classic/attach-disk#initialize-a-new-data-disk-in-linux and https://docs.microsoft.com/azure/virtual-machines/windows/attach-disk-ps#add-an-empty-data-disk-to-a-virtual-machine. /// /// This only applies to Images that contain the Windows operating system, and /// should only be used when you hold valid on-premises licenses for the Compute @@ -115,7 +115,7 @@ internal VirtualMachineConfiguration() public string NodeAgentSkuId { get; set; } /// Windows operating system settings on the virtual machine. This property must not be specified if the imageReference property specifies a Linux OS Image. public WindowsConfiguration WindowsConfiguration { get; set; } - /// The configuration for data disks attached to the Compute Nodes in the Pool. This property must be specified if the Compute Nodes in the Pool need to have empty data disks attached to them. This cannot be updated. Each Compute Node gets its own disk (the disk is not a file share). Existing disks cannot be attached, each attached disk is empty. When the Compute Node is removed from the Pool, the disk and all data associated with it is also deleted. The disk is not formatted after being attached, it must be formatted before use - for more information see https://docs.microsoft.com/en-us/azure/virtual-machines/linux/classic/attach-disk#initialize-a-new-data-disk-in-linux and https://docs.microsoft.com/en-us/azure/virtual-machines/windows/attach-disk-ps#add-an-empty-data-disk-to-a-virtual-machine. + /// The configuration for data disks attached to the Compute Nodes in the Pool. This property must be specified if the Compute Nodes in the Pool need to have empty data disks attached to them. This cannot be updated. Each Compute Node gets its own disk (the disk is not a file share). Existing disks cannot be attached, each attached disk is empty. When the Compute Node is removed from the Pool, the disk and all data associated with it is also deleted. 
The disk is not formatted after being attached, it must be formatted before use - for more information see https://docs.microsoft.com/azure/virtual-machines/linux/classic/attach-disk#initialize-a-new-data-disk-in-linux and https://docs.microsoft.com/azure/virtual-machines/windows/attach-disk-ps#add-an-empty-data-disk-to-a-virtual-machine. public IList DataDisks { get; } /// /// This only applies to Images that contain the Windows operating system, and diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineInfo.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineInfo.Serialization.cs index 90cd6837075d1..938991644eeab 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineInfo.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/VirtualMachineInfo.Serialization.cs @@ -18,6 +18,15 @@ public partial class VirtualMachineInfo : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(VirtualMachineInfo)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(ImageReference)) { writer.WritePropertyName("imageReference"u8); @@ -51,7 +59,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit #endif } } - writer.WriteEndObject(); } VirtualMachineInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsConfiguration.Serialization.cs index 273afa8eb8aba..843ff424ace8c 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class WindowsConfiguration : IUtf8JsonSerializable, IJsonModel ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr throw new FormatException($"The model {nameof(WindowsConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(EnableAutomaticUpdates)) { writer.WritePropertyName("enableAutomaticUpdates"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWr #endif } } - writer.WriteEndObject(); } WindowsConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsUserConfiguration.Serialization.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsUserConfiguration.Serialization.cs index 225d0b67c3d56..e39f8488139e3 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsUserConfiguration.Serialization.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsUserConfiguration.Serialization.cs @@ -18,6 +18,15 @@ public partial class WindowsUserConfiguration : IUtf8JsonSerializable, IJsonMode void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") @@ -25,7 +34,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead throw new FormatException($"The model {nameof(WindowsUserConfiguration)} does not support writing '{format}' format."); } - writer.WriteStartObject(); if (Optional.IsDefined(LoginMode)) { writer.WritePropertyName("loginMode"u8); @@ -46,7 +54,6 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead #endif } } - writer.WriteEndObject(); } WindowsUserConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) diff --git a/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsUserConfiguration.cs b/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsUserConfiguration.cs index 1f3173fcb231c..c096f45f32943 100644 --- a/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsUserConfiguration.cs +++ b/sdk/batch/Azure.Compute.Batch/src/Generated/WindowsUserConfiguration.cs @@ -51,7 +51,7 @@ public WindowsUserConfiguration() } /// Initializes a new instance of . - /// The login mode for the user. The default value for VirtualMachineConfiguration Pools is 'batch'. + /// The login mode for the user. The default is 'batch'. /// Keeps track of any properties unknown to the library. internal WindowsUserConfiguration(LoginMode? loginMode, IDictionary serializedAdditionalRawData) { @@ -59,7 +59,7 @@ internal WindowsUserConfiguration(LoginMode? loginMode, IDictionary The login mode for the user. The default value for VirtualMachineConfiguration Pools is 'batch'. + /// The login mode for the user. The default is 'batch'. public LoginMode? 
LoginMode { get; set; } } } diff --git a/sdk/batch/Azure.Compute.Batch/tests/Generated/Samples/Samples_BatchClient.cs b/sdk/batch/Azure.Compute.Batch/tests/Generated/Samples/Samples_BatchClient.cs index 274be47c20ed9..ce0bb02a54a9c 100644 --- a/sdk/batch/Azure.Compute.Batch/tests/Generated/Samples/Samples_BatchClient.cs +++ b/sdk/batch/Azure.Compute.Batch/tests/Generated/Samples/Samples_BatchClient.cs @@ -20,7 +20,7 @@ public partial class Samples_BatchClient { [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetApplication_GetApplications() + public void Example_BatchClient_GetApplication_GetApplications() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -36,7 +36,7 @@ public void Example_Batch_GetApplication_GetApplications() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetApplication_GetApplications_Async() + public async Task Example_BatchClient_GetApplication_GetApplications_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -52,7 +52,7 @@ public async Task Example_Batch_GetApplication_GetApplications_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetApplication_GetApplications_Convenience() + public void Example_BatchClient_GetApplication_GetApplications_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -63,7 +63,7 @@ public void Example_Batch_GetApplication_GetApplications_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetApplication_GetApplications_Convenience_Async() + public async Task Example_BatchClient_GetApplication_GetApplications_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -74,7 +74,7 @@ public async Task Example_Batch_GetApplication_GetApplications_Convenience_Async [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAPoolWithAcceleratedNetworking() + public void Example_BatchClient_CreatePool_CreatesAPoolWithAcceleratedNetworking() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -108,7 +108,7 @@ public void Example_Batch_CreatePool_CreatesAPoolWithAcceleratedNetworking() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAPoolWithAcceleratedNetworking_Async() + public async Task Example_BatchClient_CreatePool_CreatesAPoolWithAcceleratedNetworking_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -142,7 +142,7 @@ public async Task Example_Batch_CreatePool_CreatesAPoolWithAcceleratedNetworking [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAPoolWithAcceleratedNetworking_Convenience() + public void Example_BatchClient_CreatePool_CreatesAPoolWithAcceleratedNetworking_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -168,7 +168,7 @@ public void Example_Batch_CreatePool_CreatesAPoolWithAcceleratedNetworking_Conve [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAPoolWithAcceleratedNetworking_Convenience_Async() + public async Task 
Example_BatchClient_CreatePool_CreatesAPoolWithAcceleratedNetworking_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -194,7 +194,7 @@ public async Task Example_Batch_CreatePool_CreatesAPoolWithAcceleratedNetworking [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAPoolWithMountDriveSpecified() + public void Example_BatchClient_CreatePool_CreatesAPoolWithMountDriveSpecified() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -275,7 +275,7 @@ public void Example_Batch_CreatePool_CreatesAPoolWithMountDriveSpecified() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAPoolWithMountDriveSpecified_Async() + public async Task Example_BatchClient_CreatePool_CreatesAPoolWithMountDriveSpecified_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -356,7 +356,7 @@ public async Task Example_Batch_CreatePool_CreatesAPoolWithMountDriveSpecified_A [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAPoolWithMountDriveSpecified_Convenience() + public void Example_BatchClient_CreatePool_CreatesAPoolWithMountDriveSpecified_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -407,7 +407,7 @@ public void Example_Batch_CreatePool_CreatesAPoolWithMountDriveSpecified_Conveni [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAPoolWithMountDriveSpecified_Convenience_Async() + public async Task Example_BatchClient_CreatePool_CreatesAPoolWithMountDriveSpecified_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -458,7 +458,7 @@ public async Task Example_Batch_CreatePool_CreatesAPoolWithMountDriveSpecified_C [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithOSDisk() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithOSDisk() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -504,7 +504,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithOSDisk_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithOSDisk_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -550,7 +550,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithOSDisk_Convenience() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithOSDisk_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -573,7 +573,10 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith }, Caching = CachingType.ReadWrite, DiskSizeGB = 100, - ManagedDisk = new ManagedDisk(StorageAccountType.StandardSSDLRS), + ManagedDisk = new ManagedDisk + { + StorageAccountType = 
StorageAccountType.StandardSSDLRS, + }, }, }, ResizeTimeout = XmlConvert.ToTimeSpan("PT15M"), @@ -587,7 +590,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithOSDisk_Convenience_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithOSDisk_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -610,7 +613,10 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo }, Caching = CachingType.ReadWrite, DiskSizeGB = 100, - ManagedDisk = new ManagedDisk(StorageAccountType.StandardSSDLRS), + ManagedDisk = new ManagedDisk + { + StorageAccountType = StorageAccountType.StandardSSDLRS, + }, }, }, ResizeTimeout = XmlConvert.ToTimeSpan("PT15M"), @@ -624,7 +630,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesASimplePoolWithResourceTags() + public void Example_BatchClient_CreatePool_CreatesASimplePoolWithResourceTags() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -659,7 +665,7 @@ public void Example_Batch_CreatePool_CreatesASimplePoolWithResourceTags() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesASimplePoolWithResourceTags_Async() + public async Task Example_BatchClient_CreatePool_CreatesASimplePoolWithResourceTags_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -694,7 +700,7 @@ public async Task Example_Batch_CreatePool_CreatesASimplePoolWithResourceTags_As [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesASimplePoolWithResourceTags_Convenience() + public void Example_BatchClient_CreatePool_CreatesASimplePoolWithResourceTags_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -721,7 +727,7 @@ public void Example_Batch_CreatePool_CreatesASimplePoolWithResourceTags_Convenie [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesASimplePoolWithResourceTags_Convenience_Async() + public async Task Example_BatchClient_CreatePool_CreatesASimplePoolWithResourceTags_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -748,7 +754,7 @@ public async Task Example_Batch_CreatePool_CreatesASimplePoolWithResourceTags_Co [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAPoolWithSecurityProfile() + public void Example_BatchClient_CreatePool_CreatesAPoolWithSecurityProfile() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -787,7 +793,7 @@ public void Example_Batch_CreatePool_CreatesAPoolWithSecurityProfile() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAPoolWithSecurityProfile_Async() + public async Task Example_BatchClient_CreatePool_CreatesAPoolWithSecurityProfile_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -826,7 +832,7 @@ public async Task 
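// The regenerated OSDisk samples above swap the positional ManagedDisk(StorageAccountType) call for an object
// initializer, which suggests StorageAccountType is now an optional, settable property on ManagedDisk. The new
// shape in isolation:
ManagedDisk managedDisk = new ManagedDisk
{
    StorageAccountType = StorageAccountType.StandardSSDLRS,
};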
Example_Batch_CreatePool_CreatesAPoolWithSecurityProfile_Async [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAPoolWithSecurityProfile_Convenience() + public void Example_BatchClient_CreatePool_CreatesAPoolWithSecurityProfile_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -854,7 +860,7 @@ public void Example_Batch_CreatePool_CreatesAPoolWithSecurityProfile_Convenience [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAPoolWithSecurityProfile_Convenience_Async() + public async Task Example_BatchClient_CreatePool_CreatesAPoolWithSecurityProfile_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -882,7 +888,7 @@ public async Task Example_Batch_CreatePool_CreatesAPoolWithSecurityProfile_Conve [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPool() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPool() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -928,7 +934,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPool() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPool_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPool_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -974,7 +980,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPool_Convenience() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPool_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1002,7 +1008,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPool_Con [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPool_Convenience_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPool_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1030,7 +1036,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithContainers() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithContainers() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1075,7 +1081,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithContainers_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithContainers_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1120,7 +1126,7 @@ public async 
Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithContainers_Convenience() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithContainers_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1152,7 +1158,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithContainers_Convenience_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithContainers_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1184,7 +1190,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithExtensions() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithExtensions() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1248,7 +1254,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithExtensions_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithExtensions_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1312,7 +1318,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithExtensions_Convenience() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithExtensions_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1354,7 +1360,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithExtensions_Convenience_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithExtensions_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1396,7 +1402,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithServiceArtifactReference() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithServiceArtifactReference() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1434,7 +1440,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith [Test] [Ignore("Only validating compilation of examples")] - public async Task 
Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithServiceArtifactReference_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithServiceArtifactReference_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1472,7 +1478,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithServiceArtifactReference_Convenience() + public void Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithServiceArtifactReference_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1501,7 +1507,7 @@ public void Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWith [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPoolWithServiceArtifactReference_Convenience_Async() + public async Task Example_BatchClient_CreatePool_CreatesAVirtualMachineConfigurationPoolWithServiceArtifactReference_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1530,7 +1536,7 @@ public async Task Example_Batch_CreatePool_CreatesAVirtualMachineConfigurationPo [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DeletePool_PoolDelete() + public void Example_BatchClient_DeletePool_PoolDelete() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1543,7 +1549,7 @@ public void Example_Batch_DeletePool_PoolDelete() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DeletePool_PoolDelete_Async() + public async Task Example_BatchClient_DeletePool_PoolDelete_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1556,7 +1562,7 @@ public async Task Example_Batch_DeletePool_PoolDelete_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_GetAPoolWithAcceleratedNetworking() + public void Example_BatchClient_GetPool_GetAPoolWithAcceleratedNetworking() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1570,7 +1576,7 @@ public void Example_Batch_GetPool_GetAPoolWithAcceleratedNetworking() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_GetAPoolWithAcceleratedNetworking_Async() + public async Task Example_BatchClient_GetPool_GetAPoolWithAcceleratedNetworking_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1584,7 +1590,7 @@ public async Task Example_Batch_GetPool_GetAPoolWithAcceleratedNetworking_Async( [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_GetAPoolWithAcceleratedNetworking_Convenience() + public void Example_BatchClient_GetPool_GetAPoolWithAcceleratedNetworking_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1595,7 +1601,7 @@ public void Example_Batch_GetPool_GetAPoolWithAcceleratedNetworking_Convenience( [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_GetAPoolWithAcceleratedNetworking_Convenience_Async() + public async Task 
Example_BatchClient_GetPool_GetAPoolWithAcceleratedNetworking_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1606,7 +1612,7 @@ public async Task Example_Batch_GetPool_GetAPoolWithAcceleratedNetworking_Conven [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_PoolGet() + public void Example_BatchClient_GetPool_PoolGet() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1620,7 +1626,7 @@ public void Example_Batch_GetPool_PoolGet() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_PoolGet_Async() + public async Task Example_BatchClient_GetPool_PoolGet_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1634,7 +1640,7 @@ public async Task Example_Batch_GetPool_PoolGet_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_PoolGet_Convenience() + public void Example_BatchClient_GetPool_PoolGet_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1645,7 +1651,7 @@ public void Example_Batch_GetPool_PoolGet_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_PoolGet_Convenience_Async() + public async Task Example_BatchClient_GetPool_PoolGet_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1656,7 +1662,7 @@ public async Task Example_Batch_GetPool_PoolGet_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithSecurityProfile() + public void Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithSecurityProfile() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1670,7 +1676,7 @@ public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithSecurit [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithSecurityProfile_Async() + public async Task Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithSecurityProfile_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1684,7 +1690,7 @@ public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithS [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithSecurityProfile_Convenience() + public void Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithSecurityProfile_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1695,7 +1701,7 @@ public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithSecurit [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithSecurityProfile_Convenience_Async() + public async Task Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithSecurityProfile_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1706,7 +1712,7 @@ public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithS [Test] [Ignore("Only 
validating compilation of examples")] - public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithExtensions() + public void Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithExtensions() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1720,7 +1726,7 @@ public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithExtensi [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithExtensions_Async() + public async Task Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithExtensions_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1734,7 +1740,7 @@ public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithE [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithExtensions_Convenience() + public void Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithExtensions_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1745,7 +1751,7 @@ public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithExtensi [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithExtensions_Convenience_Async() + public async Task Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithExtensions_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1756,7 +1762,7 @@ public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithE [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk() + public void Example_BatchClient_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1770,7 +1776,7 @@ public void Example_Batch_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk( [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk_Async() + public async Task Example_BatchClient_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1784,7 +1790,7 @@ public async Task Example_Batch_GetPool_AddAVirtualMachineConfigurationPoolWithO [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk_Convenience() + public void Example_BatchClient_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1795,7 +1801,7 @@ public void Example_Batch_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk_ [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk_Convenience_Async() + public async Task Example_BatchClient_GetPool_AddAVirtualMachineConfigurationPoolWithOSDisk_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1806,7 +1812,7 @@ public async Task 
Example_Batch_GetPool_AddAVirtualMachineConfigurationPoolWithO [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithServiceArtifactReference() + public void Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithServiceArtifactReference() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1820,7 +1826,7 @@ public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithService [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithServiceArtifactReference_Async() + public async Task Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithServiceArtifactReference_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1834,7 +1840,7 @@ public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithS [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithServiceArtifactReference_Convenience() + public void Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithServiceArtifactReference_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1845,7 +1851,7 @@ public void Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithService [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithServiceArtifactReference_Convenience_Async() + public async Task Example_BatchClient_GetPool_GetAVirtualMachineConfigurationPoolWithServiceArtifactReference_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1856,7 +1862,7 @@ public async Task Example_Batch_GetPool_GetAVirtualMachineConfigurationPoolWithS [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_UpdatePool_PatchThePool() + public void Example_BatchClient_UpdatePool_PatchThePool() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1876,7 +1882,7 @@ public void Example_Batch_UpdatePool_PatchThePool() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_UpdatePool_PatchThePool_Async() + public async Task Example_BatchClient_UpdatePool_PatchThePool_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1896,7 +1902,7 @@ public async Task Example_Batch_UpdatePool_PatchThePool_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DisablePoolAutoScale_DisablePoolAutoscale() + public void Example_BatchClient_DisablePoolAutoScale_DisablePoolAutoscale() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1909,7 +1915,7 @@ public void Example_Batch_DisablePoolAutoScale_DisablePoolAutoscale() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DisablePoolAutoScale_DisablePoolAutoscale_Async() + public async Task Example_BatchClient_DisablePoolAutoScale_DisablePoolAutoscale_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1922,7 +1928,7 @@ public async Task Example_Batch_DisablePoolAutoScale_DisablePoolAutoscale_Async( [Test] [Ignore("Only 
validating compilation of examples")] - public void Example_Batch_EnablePoolAutoScale_PoolEnableAutoscale() + public void Example_BatchClient_EnablePoolAutoScale_PoolEnableAutoscale() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1940,7 +1946,7 @@ public void Example_Batch_EnablePoolAutoScale_PoolEnableAutoscale() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_EnablePoolAutoScale_PoolEnableAutoscale_Async() + public async Task Example_BatchClient_EnablePoolAutoScale_PoolEnableAutoscale_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1958,7 +1964,7 @@ public async Task Example_Batch_EnablePoolAutoScale_PoolEnableAutoscale_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_EnablePoolAutoScale_PoolEnableAutoscale_Convenience() + public void Example_BatchClient_EnablePoolAutoScale_PoolEnableAutoscale_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1974,7 +1980,7 @@ public void Example_Batch_EnablePoolAutoScale_PoolEnableAutoscale_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_EnablePoolAutoScale_PoolEnableAutoscale_Convenience_Async() + public async Task Example_BatchClient_EnablePoolAutoScale_PoolEnableAutoscale_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -1990,7 +1996,7 @@ public async Task Example_Batch_EnablePoolAutoScale_PoolEnableAutoscale_Convenie [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_EvaluatePoolAutoScale_PoolEvaluateAutoscale() + public void Example_BatchClient_EvaluatePoolAutoScale_PoolEvaluateAutoscale() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2008,7 +2014,7 @@ public void Example_Batch_EvaluatePoolAutoScale_PoolEvaluateAutoscale() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Async() + public async Task Example_BatchClient_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2026,7 +2032,7 @@ public async Task Example_Batch_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Asyn [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Convenience() + public void Example_BatchClient_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2038,7 +2044,7 @@ public void Example_Batch_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Convenienc [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Convenience_Async() + public async Task Example_BatchClient_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2050,7 +2056,7 @@ public async Task Example_Batch_EvaluatePoolAutoScale_PoolEvaluateAutoscale_Conv [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ResizePool_PoolResize() + public void Example_BatchClient_ResizePool_PoolResize() { Uri endpoint = new Uri(""); 
TokenCredential credential = new DefaultAzureCredential(); @@ -2068,7 +2074,7 @@ public void Example_Batch_ResizePool_PoolResize() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ResizePool_PoolResize_Async() + public async Task Example_BatchClient_ResizePool_PoolResize_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2086,7 +2092,7 @@ public async Task Example_Batch_ResizePool_PoolResize_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ResizePool_PoolResize_Convenience() + public void Example_BatchClient_ResizePool_PoolResize_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2102,7 +2108,7 @@ public void Example_Batch_ResizePool_PoolResize_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ResizePool_PoolResize_Convenience_Async() + public async Task Example_BatchClient_ResizePool_PoolResize_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2118,7 +2124,7 @@ public async Task Example_Batch_ResizePool_PoolResize_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_StopPoolResize_PoolStopResize() + public void Example_BatchClient_StopPoolResize_PoolStopResize() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2131,7 +2137,7 @@ public void Example_Batch_StopPoolResize_PoolStopResize() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_StopPoolResize_PoolStopResize_Async() + public async Task Example_BatchClient_StopPoolResize_PoolStopResize_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2144,7 +2150,7 @@ public async Task Example_Batch_StopPoolResize_PoolStopResize_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplacePoolProperties_PoolUpdate() + public void Example_BatchClient_ReplacePoolProperties_PoolUpdate() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2166,7 +2172,7 @@ public void Example_Batch_ReplacePoolProperties_PoolUpdate() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplacePoolProperties_PoolUpdate_Async() + public async Task Example_BatchClient_ReplacePoolProperties_PoolUpdate_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2188,7 +2194,7 @@ public async Task Example_Batch_ReplacePoolProperties_PoolUpdate_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplacePoolProperties_PoolUpdate_Convenience() + public void Example_BatchClient_ReplacePoolProperties_PoolUpdate_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2203,7 +2209,7 @@ public void Example_Batch_ReplacePoolProperties_PoolUpdate_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplacePoolProperties_PoolUpdate_Convenience_Async() + public async Task Example_BatchClient_ReplacePoolProperties_PoolUpdate_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2218,7 +2224,7 @@ public async Task 
Example_Batch_ReplacePoolProperties_PoolUpdate_Convenience_Asy [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_RemoveNodes_PoolRemoveNodes() + public void Example_BatchClient_RemoveNodes_PoolRemoveNodes() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2239,7 +2245,7 @@ public void Example_Batch_RemoveNodes_PoolRemoveNodes() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_RemoveNodes_PoolRemoveNodes_Async() + public async Task Example_BatchClient_RemoveNodes_PoolRemoveNodes_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2260,7 +2266,7 @@ public async Task Example_Batch_RemoveNodes_PoolRemoveNodes_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_RemoveNodes_PoolRemoveNodes_Convenience() + public void Example_BatchClient_RemoveNodes_PoolRemoveNodes_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2272,7 +2278,7 @@ public void Example_Batch_RemoveNodes_PoolRemoveNodes_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_RemoveNodes_PoolRemoveNodes_Convenience_Async() + public async Task Example_BatchClient_RemoveNodes_PoolRemoveNodes_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2284,7 +2290,7 @@ public async Task Example_Batch_RemoveNodes_PoolRemoveNodes_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DeleteJob_DeleteJob() + public void Example_BatchClient_DeleteJob_DeleteJob() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2297,7 +2303,7 @@ public void Example_Batch_DeleteJob_DeleteJob() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DeleteJob_DeleteJob_Async() + public async Task Example_BatchClient_DeleteJob_DeleteJob_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2310,7 +2316,7 @@ public async Task Example_Batch_DeleteJob_DeleteJob_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJob_JobGet() + public void Example_BatchClient_GetJob_JobGet() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2324,7 +2330,7 @@ public void Example_Batch_GetJob_JobGet() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJob_JobGet_Async() + public async Task Example_BatchClient_GetJob_JobGet_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2338,7 +2344,7 @@ public async Task Example_Batch_GetJob_JobGet_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJob_JobGet_Convenience() + public void Example_BatchClient_GetJob_JobGet_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2349,7 +2355,7 @@ public void Example_Batch_GetJob_JobGet_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJob_JobGet_Convenience_Async() + public async Task Example_BatchClient_GetJob_JobGet_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new 
DefaultAzureCredential(); @@ -2360,7 +2366,7 @@ public async Task Example_Batch_GetJob_JobGet_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_UpdateJob_JobPatch() + public void Example_BatchClient_UpdateJob_JobUpdate() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2386,7 +2392,7 @@ public void Example_Batch_UpdateJob_JobPatch() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_UpdateJob_JobPatch_Async() + public async Task Example_BatchClient_UpdateJob_JobUpdate_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2412,7 +2418,7 @@ public async Task Example_Batch_UpdateJob_JobPatch_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplaceJob_JobUpdate() + public void Example_BatchClient_ReplaceJob_JobPatch() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2438,7 +2444,7 @@ public void Example_Batch_ReplaceJob_JobUpdate() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplaceJob_JobUpdate_Async() + public async Task Example_BatchClient_ReplaceJob_JobPatch_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2464,7 +2470,7 @@ public async Task Example_Batch_ReplaceJob_JobUpdate_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplaceJob_JobUpdate_Convenience() + public void Example_BatchClient_ReplaceJob_JobPatch_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2487,7 +2493,7 @@ public void Example_Batch_ReplaceJob_JobUpdate_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplaceJob_JobUpdate_Convenience_Async() + public async Task Example_BatchClient_ReplaceJob_JobPatch_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2510,7 +2516,7 @@ public async Task Example_Batch_ReplaceJob_JobUpdate_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DisableJob_JobDisable() + public void Example_BatchClient_DisableJob_JobDisable() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2527,7 +2533,7 @@ public void Example_Batch_DisableJob_JobDisable() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DisableJob_JobDisable_Async() + public async Task Example_BatchClient_DisableJob_JobDisable_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2544,7 +2550,7 @@ public async Task Example_Batch_DisableJob_JobDisable_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DisableJob_JobDisable_Convenience() + public void Example_BatchClient_DisableJob_JobDisable_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2556,7 +2562,7 @@ public void Example_Batch_DisableJob_JobDisable_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DisableJob_JobDisable_Convenience_Async() + public async Task Example_BatchClient_DisableJob_JobDisable_Convenience_Async() { Uri endpoint = new Uri(""); 
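// Illustrative sketch, not taken from this file: the non-convenience JobDisable samples above build
// their request body with RequestContent.Create. The property name and value below are assumed from
// the Batch job-disable REST schema (disableTasks: requeue | terminate | wait), not from this diff.
using RequestContent disableBody = RequestContent.Create(new
{
    disableTasks = "requeue",
});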
TokenCredential credential = new DefaultAzureCredential(); @@ -2568,7 +2574,7 @@ public async Task Example_Batch_DisableJob_JobDisable_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_EnableJob_JobEnable() + public void Example_BatchClient_EnableJob_JobEnable() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2581,7 +2587,7 @@ public void Example_Batch_EnableJob_JobEnable() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_EnableJob_JobEnable_Async() + public async Task Example_BatchClient_EnableJob_JobEnable_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2594,7 +2600,7 @@ public async Task Example_Batch_EnableJob_JobEnable_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_TerminateJob_JobTerminate() + public void Example_BatchClient_TerminateJob_JobTerminate() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2608,7 +2614,7 @@ public void Example_Batch_TerminateJob_JobTerminate() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_TerminateJob_JobTerminate_Async() + public async Task Example_BatchClient_TerminateJob_JobTerminate_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2622,7 +2628,7 @@ public async Task Example_Batch_TerminateJob_JobTerminate_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_TerminateJob_JobTerminate_Convenience() + public void Example_BatchClient_TerminateJob_JobTerminate_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2633,7 +2639,7 @@ public void Example_Batch_TerminateJob_JobTerminate_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_TerminateJob_JobTerminate_Convenience_Async() + public async Task Example_BatchClient_TerminateJob_JobTerminate_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2644,7 +2650,7 @@ public async Task Example_Batch_TerminateJob_JobTerminate_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateJob_CreatesABasicJob() + public void Example_BatchClient_CreateJob_CreatesABasicJob() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2666,7 +2672,7 @@ public void Example_Batch_CreateJob_CreatesABasicJob() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateJob_CreatesABasicJob_Async() + public async Task Example_BatchClient_CreateJob_CreatesABasicJob_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2688,7 +2694,7 @@ public async Task Example_Batch_CreateJob_CreatesABasicJob_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateJob_CreatesABasicJob_Convenience() + public void Example_BatchClient_CreateJob_CreatesABasicJob_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2706,7 +2712,7 @@ public void Example_Batch_CreateJob_CreatesABasicJob_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task 
Example_Batch_CreateJob_CreatesABasicJob_Convenience_Async() + public async Task Example_BatchClient_CreateJob_CreatesABasicJob_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2724,7 +2730,7 @@ public async Task Example_Batch_CreateJob_CreatesABasicJob_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateJob_CreatesAComplexJob() + public void Example_BatchClient_CreateJob_CreatesAComplexJob() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2790,7 +2796,7 @@ public void Example_Batch_CreateJob_CreatesAComplexJob() poolLifetimeOption = "job", pool = new { - vmSize = "Standard_D1_v2", + vmSize = "STANDARD_D2S_V3", virtualMachineConfiguration = new { imageReference = new @@ -2801,6 +2807,14 @@ public void Example_Batch_CreateJob_CreatesAComplexJob() version = "latest", }, nodeAgentSKUId = "batch.node.windows amd64", + windowsConfiguration = new + { + enableAutomaticUpdates = false, + }, + nodePlacementConfiguration = new + { + policy = "zonal", + }, }, resizeTimeout = "PT15M", targetDedicatedNodes = 3, @@ -2870,7 +2884,7 @@ public void Example_Batch_CreateJob_CreatesAComplexJob() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateJob_CreatesAComplexJob_Async() + public async Task Example_BatchClient_CreateJob_CreatesAComplexJob_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -2936,7 +2950,7 @@ public async Task Example_Batch_CreateJob_CreatesAComplexJob_Async() poolLifetimeOption = "job", pool = new { - vmSize = "Standard_D1_v2", + vmSize = "STANDARD_D2S_V3", virtualMachineConfiguration = new { imageReference = new @@ -2947,6 +2961,14 @@ public async Task Example_Batch_CreateJob_CreatesAComplexJob_Async() version = "latest", }, nodeAgentSKUId = "batch.node.windows amd64", + windowsConfiguration = new + { + enableAutomaticUpdates = false, + }, + nodePlacementConfiguration = new + { + policy = "zonal", + }, }, resizeTimeout = "PT15M", targetDedicatedNodes = 3, @@ -3016,7 +3038,7 @@ public async Task Example_Batch_CreateJob_CreatesAComplexJob_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateJob_CreatesAComplexJob_Convenience() + public void Example_BatchClient_CreateJob_CreatesAComplexJob_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3027,7 +3049,7 @@ public void Example_Batch_CreateJob_CreatesAComplexJob_Convenience() AutoPoolSpecification = new BatchAutoPoolSpecification(BatchPoolLifetimeOption.Job) { AutoPoolIdPrefix = "mypool", - Pool = new BatchPoolSpecification("Standard_D1_v2") + Pool = new BatchPoolSpecification("STANDARD_D2S_V3") { VirtualMachineConfiguration = new VirtualMachineConfiguration(new ImageReference { @@ -3035,7 +3057,17 @@ public void Example_Batch_CreateJob_CreatesAComplexJob_Convenience() Offer = "WindowsServer", Sku = "2016-datacenter-smalldisk", Version = "latest", - }, "batch.node.windows amd64"), + }, "batch.node.windows amd64") + { + WindowsConfiguration = new WindowsConfiguration + { + EnableAutomaticUpdates = false, + }, + NodePlacementConfiguration = new BatchNodePlacementConfiguration + { + Policy = BatchNodePlacementPolicyType.Zonal, + }, + }, TaskSlotsPerNode = 2, TaskSchedulingPolicy = new BatchTaskSchedulingPolicy(BatchNodeFillType.Spread), ResizeTimeout = 
XmlConvert.ToTimeSpan("PT15M"), @@ -3117,7 +3149,7 @@ public void Example_Batch_CreateJob_CreatesAComplexJob_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateJob_CreatesAComplexJob_Convenience_Async() + public async Task Example_BatchClient_CreateJob_CreatesAComplexJob_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3128,7 +3160,7 @@ public async Task Example_Batch_CreateJob_CreatesAComplexJob_Convenience_Async() AutoPoolSpecification = new BatchAutoPoolSpecification(BatchPoolLifetimeOption.Job) { AutoPoolIdPrefix = "mypool", - Pool = new BatchPoolSpecification("Standard_D1_v2") + Pool = new BatchPoolSpecification("STANDARD_D2S_V3") { VirtualMachineConfiguration = new VirtualMachineConfiguration(new ImageReference { @@ -3136,7 +3168,17 @@ public async Task Example_Batch_CreateJob_CreatesAComplexJob_Convenience_Async() Offer = "WindowsServer", Sku = "2016-datacenter-smalldisk", Version = "latest", - }, "batch.node.windows amd64"), + }, "batch.node.windows amd64") + { + WindowsConfiguration = new WindowsConfiguration + { + EnableAutomaticUpdates = false, + }, + NodePlacementConfiguration = new BatchNodePlacementConfiguration + { + Policy = BatchNodePlacementPolicyType.Zonal, + }, + }, TaskSlotsPerNode = 2, TaskSchedulingPolicy = new BatchTaskSchedulingPolicy(BatchNodeFillType.Spread), ResizeTimeout = XmlConvert.ToTimeSpan("PT15M"), @@ -3218,7 +3260,7 @@ public async Task Example_Batch_CreateJob_CreatesAComplexJob_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobTaskCounts_JobGetTaskCounts() + public void Example_BatchClient_GetJobTaskCounts_JobGetTaskCounts() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3241,7 +3283,7 @@ public void Example_Batch_GetJobTaskCounts_JobGetTaskCounts() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobTaskCounts_JobGetTaskCounts_Async() + public async Task Example_BatchClient_GetJobTaskCounts_JobGetTaskCounts_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3264,7 +3306,7 @@ public async Task Example_Batch_GetJobTaskCounts_JobGetTaskCounts_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobTaskCounts_JobGetTaskCounts_Convenience() + public void Example_BatchClient_GetJobTaskCounts_JobGetTaskCounts_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3275,7 +3317,7 @@ public void Example_Batch_GetJobTaskCounts_JobGetTaskCounts_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobTaskCounts_JobGetTaskCounts_Convenience_Async() + public async Task Example_BatchClient_GetJobTaskCounts_JobGetTaskCounts_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3286,7 +3328,7 @@ public async Task Example_Batch_GetJobTaskCounts_JobGetTaskCounts_Convenience_As [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DeleteJobSchedule_JobScheduleDelete() + public void Example_BatchClient_DeleteJobSchedule_JobScheduleDelete() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3299,7 +3341,7 @@ public void 
Example_Batch_DeleteJobSchedule_JobScheduleDelete() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DeleteJobSchedule_JobScheduleDelete_Async() + public async Task Example_BatchClient_DeleteJobSchedule_JobScheduleDelete_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3312,7 +3354,7 @@ public async Task Example_Batch_DeleteJobSchedule_JobScheduleDelete_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobSchedule_JobScheduleGet() + public void Example_BatchClient_GetJobSchedule_JobScheduleGet() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3326,7 +3368,7 @@ public void Example_Batch_GetJobSchedule_JobScheduleGet() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobSchedule_JobScheduleGet_Async() + public async Task Example_BatchClient_GetJobSchedule_JobScheduleGet_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3340,7 +3382,7 @@ public async Task Example_Batch_GetJobSchedule_JobScheduleGet_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobSchedule_JobScheduleGet_Convenience() + public void Example_BatchClient_GetJobSchedule_JobScheduleGet_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3351,7 +3393,7 @@ public void Example_Batch_GetJobSchedule_JobScheduleGet_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobSchedule_JobScheduleGet_Convenience_Async() + public async Task Example_BatchClient_GetJobSchedule_JobScheduleGet_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3362,7 +3404,7 @@ public async Task Example_Batch_GetJobSchedule_JobScheduleGet_Convenience_Async( [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_UpdateJobSchedule_JobSchedulePatch() + public void Example_BatchClient_UpdateJobSchedule_JobScheduleUpdate() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3396,7 +3438,7 @@ public void Example_Batch_UpdateJobSchedule_JobSchedulePatch() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_UpdateJobSchedule_JobSchedulePatch_Async() + public async Task Example_BatchClient_UpdateJobSchedule_JobScheduleUpdate_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3430,7 +3472,7 @@ public async Task Example_Batch_UpdateJobSchedule_JobSchedulePatch_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplaceJobSchedule_JobScheduleUpdate() + public void Example_BatchClient_ReplaceJobSchedule_JobSchedulePatch() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3464,7 +3506,7 @@ public void Example_Batch_ReplaceJobSchedule_JobScheduleUpdate() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplaceJobSchedule_JobScheduleUpdate_Async() + public async Task Example_BatchClient_ReplaceJobSchedule_JobSchedulePatch_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3498,7 +3540,7 @@ public async Task 
Example_Batch_ReplaceJobSchedule_JobScheduleUpdate_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplaceJobSchedule_JobScheduleUpdate_Convenience() + public void Example_BatchClient_ReplaceJobSchedule_JobSchedulePatch_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3528,7 +3570,7 @@ public void Example_Batch_ReplaceJobSchedule_JobScheduleUpdate_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplaceJobSchedule_JobScheduleUpdate_Convenience_Async() + public async Task Example_BatchClient_ReplaceJobSchedule_JobSchedulePatch_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3558,7 +3600,7 @@ public async Task Example_Batch_ReplaceJobSchedule_JobScheduleUpdate_Convenience [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DisableJobSchedule_JobScheduleDisable() + public void Example_BatchClient_DisableJobSchedule_JobScheduleDisable() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3571,7 +3613,7 @@ public void Example_Batch_DisableJobSchedule_JobScheduleDisable() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DisableJobSchedule_JobScheduleDisable_Async() + public async Task Example_BatchClient_DisableJobSchedule_JobScheduleDisable_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3584,7 +3626,7 @@ public async Task Example_Batch_DisableJobSchedule_JobScheduleDisable_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_EnableJobSchedule_JobScheduleEnable() + public void Example_BatchClient_EnableJobSchedule_JobScheduleEnable() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3597,7 +3639,7 @@ public void Example_Batch_EnableJobSchedule_JobScheduleEnable() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_EnableJobSchedule_JobScheduleEnable_Async() + public async Task Example_BatchClient_EnableJobSchedule_JobScheduleEnable_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3610,7 +3652,7 @@ public async Task Example_Batch_EnableJobSchedule_JobScheduleEnable_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_TerminateJobSchedule_JobScheduleTerminate() + public void Example_BatchClient_TerminateJobSchedule_JobScheduleTerminate() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3623,7 +3665,7 @@ public void Example_Batch_TerminateJobSchedule_JobScheduleTerminate() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_TerminateJobSchedule_JobScheduleTerminate_Async() + public async Task Example_BatchClient_TerminateJobSchedule_JobScheduleTerminate_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3636,7 +3678,7 @@ public async Task Example_Batch_TerminateJobSchedule_JobScheduleTerminate_Async( [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateJobSchedule_CreatesABasicJobSchedule() + public void Example_BatchClient_CreateJobSchedule_CreatesABasicJobSchedule() { Uri endpoint = new Uri(""); TokenCredential 
credential = new DefaultAzureCredential(); @@ -3664,7 +3706,7 @@ public void Example_Batch_CreateJobSchedule_CreatesABasicJobSchedule() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateJobSchedule_CreatesABasicJobSchedule_Async() + public async Task Example_BatchClient_CreateJobSchedule_CreatesABasicJobSchedule_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3692,7 +3734,7 @@ public async Task Example_Batch_CreateJobSchedule_CreatesABasicJobSchedule_Async [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateJobSchedule_CreatesABasicJobSchedule_Convenience() + public void Example_BatchClient_CreateJobSchedule_CreatesABasicJobSchedule_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3710,7 +3752,7 @@ public void Example_Batch_CreateJobSchedule_CreatesABasicJobSchedule_Convenience [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateJobSchedule_CreatesABasicJobSchedule_Convenience_Async() + public async Task Example_BatchClient_CreateJobSchedule_CreatesABasicJobSchedule_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3728,7 +3770,7 @@ public async Task Example_Batch_CreateJobSchedule_CreatesABasicJobSchedule_Conve [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd() + public void Example_BatchClient_CreateJobSchedule_CreatesAComplexJobScheduleAdd() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3803,7 +3845,7 @@ public void Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd() poolLifetimeOption = "jobschedule", pool = new { - vmSize = "Standard_D1_v2", + vmSize = "STANDARD_D2S_V3", virtualMachineConfiguration = new { imageReference = new @@ -3814,6 +3856,14 @@ public void Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd() version = "latest", }, nodeAgentSKUId = "batch.node.windows amd64", + windowsConfiguration = new + { + enableAutomaticUpdates = false, + }, + nodePlacementConfiguration = new + { + policy = "zonal", + }, }, resizeTimeout = "PT15M", targetDedicatedNodes = 3, @@ -3884,7 +3934,7 @@ public void Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Async() + public async Task Example_BatchClient_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -3959,7 +4009,7 @@ public async Task Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_ poolLifetimeOption = "jobschedule", pool = new { - vmSize = "Standard_D1_v2", + vmSize = "STANDARD_D2S_V3", virtualMachineConfiguration = new { imageReference = new @@ -3970,6 +4020,14 @@ public async Task Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_ version = "latest", }, nodeAgentSKUId = "batch.node.windows amd64", + windowsConfiguration = new + { + enableAutomaticUpdates = false, + }, + nodePlacementConfiguration = new + { + policy = "zonal", + }, }, resizeTimeout = "PT15M", targetDedicatedNodes = 3, @@ -4040,7 +4098,7 @@ public async Task Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_ [Test] 
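// Condensed sketch of the auto-pool shape this change introduces in the convenience samples below:
// the pool size moves to STANDARD_D2S_V3 and the VirtualMachineConfiguration now opts out of
// automatic Windows updates and pins zonal node placement. Every identifier is taken from the
// samples that follow; only this condensed layout is illustrative, and imageReference stands in for
// the ImageReference the samples build inline.
//
//   Pool = new BatchPoolSpecification("STANDARD_D2S_V3")
//   {
//       VirtualMachineConfiguration = new VirtualMachineConfiguration(imageReference, "batch.node.windows amd64")
//       {
//           WindowsConfiguration = new WindowsConfiguration { EnableAutomaticUpdates = false },
//           NodePlacementConfiguration = new BatchNodePlacementConfiguration
//           {
//               Policy = BatchNodePlacementPolicyType.Zonal,
//           },
//       },
//   }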
[Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Convenience() + public void Example_BatchClient_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4057,7 +4115,7 @@ public void Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Conven AutoPoolSpecification = new BatchAutoPoolSpecification(BatchPoolLifetimeOption.JobSchedule) { AutoPoolIdPrefix = "mypool", - Pool = new BatchPoolSpecification("Standard_D1_v2") + Pool = new BatchPoolSpecification("STANDARD_D2S_V3") { VirtualMachineConfiguration = new VirtualMachineConfiguration(new ImageReference { @@ -4065,7 +4123,17 @@ public void Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Conven Offer = "WindowsServer", Sku = "2016-datacenter-smalldisk", Version = "latest", - }, "batch.node.windows amd64"), + }, "batch.node.windows amd64") + { + WindowsConfiguration = new WindowsConfiguration + { + EnableAutomaticUpdates = false, + }, + NodePlacementConfiguration = new BatchNodePlacementConfiguration + { + Policy = BatchNodePlacementPolicyType.Zonal, + }, + }, TaskSlotsPerNode = 2, TaskSchedulingPolicy = new BatchTaskSchedulingPolicy(BatchNodeFillType.Spread), ResizeTimeout = XmlConvert.ToTimeSpan("PT15M"), @@ -4149,7 +4217,7 @@ public void Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Conven [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Convenience_Async() + public async Task Example_BatchClient_CreateJobSchedule_CreatesAComplexJobScheduleAdd_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4166,7 +4234,7 @@ public async Task Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_ AutoPoolSpecification = new BatchAutoPoolSpecification(BatchPoolLifetimeOption.JobSchedule) { AutoPoolIdPrefix = "mypool", - Pool = new BatchPoolSpecification("Standard_D1_v2") + Pool = new BatchPoolSpecification("STANDARD_D2S_V3") { VirtualMachineConfiguration = new VirtualMachineConfiguration(new ImageReference { @@ -4174,7 +4242,17 @@ public async Task Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_ Offer = "WindowsServer", Sku = "2016-datacenter-smalldisk", Version = "latest", - }, "batch.node.windows amd64"), + }, "batch.node.windows amd64") + { + WindowsConfiguration = new WindowsConfiguration + { + EnableAutomaticUpdates = false, + }, + NodePlacementConfiguration = new BatchNodePlacementConfiguration + { + Policy = BatchNodePlacementPolicyType.Zonal, + }, + }, TaskSlotsPerNode = 2, TaskSchedulingPolicy = new BatchTaskSchedulingPolicy(BatchNodeFillType.Spread), ResizeTimeout = XmlConvert.ToTimeSpan("PT15M"), @@ -4258,7 +4336,7 @@ public async Task Example_Batch_CreateJobSchedule_CreatesAComplexJobScheduleAdd_ [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTask_CreatesABasicTask() + public void Example_BatchClient_CreateTask_CreatesABasicTask() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4276,7 +4354,7 @@ public void Example_Batch_CreateTask_CreatesABasicTask() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTask_CreatesABasicTask_Async() + public async Task Example_BatchClient_CreateTask_CreatesABasicTask_Async() { Uri 
endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4294,7 +4372,7 @@ public async Task Example_Batch_CreateTask_CreatesABasicTask_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTask_CreatesABasicTask_Convenience() + public void Example_BatchClient_CreateTask_CreatesABasicTask_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4306,7 +4384,7 @@ public void Example_Batch_CreateTask_CreatesABasicTask_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTask_CreatesABasicTask_Convenience_Async() + public async Task Example_BatchClient_CreateTask_CreatesABasicTask_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4318,7 +4396,7 @@ public async Task Example_Batch_CreateTask_CreatesABasicTask_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTask_CreatesATaskWithContainerSettings() + public void Example_BatchClient_CreateTask_CreatesATaskWithContainerSettings() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4349,7 +4427,7 @@ public void Example_Batch_CreateTask_CreatesATaskWithContainerSettings() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTask_CreatesATaskWithContainerSettings_Async() + public async Task Example_BatchClient_CreateTask_CreatesATaskWithContainerSettings_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4380,7 +4458,7 @@ public async Task Example_Batch_CreateTask_CreatesATaskWithContainerSettings_Asy [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTask_CreatesATaskWithContainerSettings_Convenience() + public void Example_BatchClient_CreateTask_CreatesATaskWithContainerSettings_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4406,7 +4484,7 @@ public void Example_Batch_CreateTask_CreatesATaskWithContainerSettings_Convenien [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTask_CreatesATaskWithContainerSettings_Convenience_Async() + public async Task Example_BatchClient_CreateTask_CreatesATaskWithContainerSettings_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4432,7 +4510,297 @@ public async Task Example_Batch_CreateTask_CreatesATaskWithContainerSettings_Con [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTask_CreatesATaskWithExitConditions() + public void Example_BatchClient_CreateTask_CreatesATaskWithContainerSettingsWithDataIsolation() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + id = "taskId", + commandLine = "bash -c 'echo hello'", + containerSettings = new + { + imageName = "ubuntu", + containerHostBatchBindMounts = new object[] + { +new +{ +source = "Task", +isReadOnly = true, +} + }, + }, + userIdentity = new + { + autoUser = new + { + scope = "task", + elevationLevel = "nonadmin", + }, + }, + }); + Response response = client.CreateTask("jobId", content); + + 
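// Per the sample's data-isolation naming, the containerHostBatchBindMounts entry sent above mounts
// the Batch task directory (source = "Task") into the container read-only. The convenience-model
// equivalent is sketched here using only types and members that appear verbatim in the
// *_Convenience samples that follow; the condensed initializer layout is the only editorial liberty.
BatchTaskContainerSettings isolationSettings = new BatchTaskContainerSettings("ubuntu")
{
    ContainerHostBatchBindMounts =
    {
        new ContainerHostBatchBindMountEntry
        {
            Source = ContainerHostDataPath.Task,
            IsReadOnly = true,
        },
    },
};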
Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_BatchClient_CreateTask_CreatesATaskWithContainerSettingsWithDataIsolation_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + id = "taskId", + commandLine = "bash -c 'echo hello'", + containerSettings = new + { + imageName = "ubuntu", + containerHostBatchBindMounts = new object[] + { +new +{ +source = "Task", +isReadOnly = true, +} + }, + }, + userIdentity = new + { + autoUser = new + { + scope = "task", + elevationLevel = "nonadmin", + }, + }, + }); + Response response = await client.CreateTaskAsync("jobId", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_BatchClient_CreateTask_CreatesATaskWithContainerSettingsWithDataIsolation_Convenience() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "bash -c 'echo hello'") + { + ContainerSettings = new BatchTaskContainerSettings("ubuntu") + { + ContainerHostBatchBindMounts = {new ContainerHostBatchBindMountEntry +{ +Source = ContainerHostDataPath.Task, +IsReadOnly = true, +}}, + }, + UserIdentity = new UserIdentity + { + AutoUser = new AutoUserSpecification + { + Scope = AutoUserScope.Task, + ElevationLevel = ElevationLevel.NonAdmin, + }, + }, + }; + Response response = client.CreateTask("jobId", task); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_BatchClient_CreateTask_CreatesATaskWithContainerSettingsWithDataIsolation_Convenience_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "bash -c 'echo hello'") + { + ContainerSettings = new BatchTaskContainerSettings("ubuntu") + { + ContainerHostBatchBindMounts = {new ContainerHostBatchBindMountEntry +{ +Source = ContainerHostDataPath.Task, +IsReadOnly = true, +}}, + }, + UserIdentity = new UserIdentity + { + AutoUser = new AutoUserSpecification + { + Scope = AutoUserScope.Task, + ElevationLevel = ElevationLevel.NonAdmin, + }, + }, + }; + Response response = await client.CreateTaskAsync("jobId", task); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_BatchClient_CreateTask_CreatesATaskWithContainerSettingsWithDuplicateSource() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + id = "taskId", + commandLine = "bash -c 'echo hello'", + containerSettings = new + { + imageName = "ubuntu", + containerHostBatchBindMounts = new object[] + { +new +{ +source = "Task", +isReadOnly = true, +}, +new +{ +source = "Task", +isReadOnly = true, +} + }, + }, + userIdentity = new + { + autoUser = new + { + scope = "task", + elevationLevel = "nonadmin", + }, + }, + }); + Response response = client.CreateTask("jobId", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating 
compilation of examples")] + public async Task Example_BatchClient_CreateTask_CreatesATaskWithContainerSettingsWithDuplicateSource_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + id = "taskId", + commandLine = "bash -c 'echo hello'", + containerSettings = new + { + imageName = "ubuntu", + containerHostBatchBindMounts = new object[] + { +new +{ +source = "Task", +isReadOnly = true, +}, +new +{ +source = "Task", +isReadOnly = true, +} + }, + }, + userIdentity = new + { + autoUser = new + { + scope = "task", + elevationLevel = "nonadmin", + }, + }, + }); + Response response = await client.CreateTaskAsync("jobId", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_BatchClient_CreateTask_CreatesATaskWithContainerSettingsWithDuplicateSource_Convenience() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "bash -c 'echo hello'") + { + ContainerSettings = new BatchTaskContainerSettings("ubuntu") + { + ContainerHostBatchBindMounts = {new ContainerHostBatchBindMountEntry +{ +Source = ContainerHostDataPath.Task, +IsReadOnly = true, +}, new ContainerHostBatchBindMountEntry +{ +Source = ContainerHostDataPath.Task, +IsReadOnly = true, +}}, + }, + UserIdentity = new UserIdentity + { + AutoUser = new AutoUserSpecification + { + Scope = AutoUserScope.Task, + ElevationLevel = ElevationLevel.NonAdmin, + }, + }, + }; + Response response = client.CreateTask("jobId", task); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_BatchClient_CreateTask_CreatesATaskWithContainerSettingsWithDuplicateSource_Convenience_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + BatchTaskCreateContent task = new BatchTaskCreateContent("taskId", "bash -c 'echo hello'") + { + ContainerSettings = new BatchTaskContainerSettings("ubuntu") + { + ContainerHostBatchBindMounts = {new ContainerHostBatchBindMountEntry +{ +Source = ContainerHostDataPath.Task, +IsReadOnly = true, +}, new ContainerHostBatchBindMountEntry +{ +Source = ContainerHostDataPath.Task, +IsReadOnly = true, +}}, + }, + UserIdentity = new UserIdentity + { + AutoUser = new AutoUserSpecification + { + Scope = AutoUserScope.Task, + ElevationLevel = ElevationLevel.NonAdmin, + }, + }, + }; + Response response = await client.CreateTaskAsync("jobId", task); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_BatchClient_CreateTask_CreatesATaskWithExitConditions() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4473,7 +4841,7 @@ public void Example_Batch_CreateTask_CreatesATaskWithExitConditions() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTask_CreatesATaskWithExitConditions_Async() + public async Task Example_BatchClient_CreateTask_CreatesATaskWithExitConditions_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4514,7 +4882,7 @@ public async Task 
Example_Batch_CreateTask_CreatesATaskWithExitConditions_Async( [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTask_CreatesATaskWithExitConditions_Convenience() + public void Example_BatchClient_CreateTask_CreatesATaskWithExitConditions_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4543,7 +4911,7 @@ public void Example_Batch_CreateTask_CreatesATaskWithExitConditions_Convenience( [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTask_CreatesATaskWithExitConditions_Convenience_Async() + public async Task Example_BatchClient_CreateTask_CreatesATaskWithExitConditions_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4572,7 +4940,7 @@ public async Task Example_Batch_CreateTask_CreatesATaskWithExitConditions_Conven [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTask_CreatesATaskWithExtraSlotRequirement() + public void Example_BatchClient_CreateTask_CreatesATaskWithExtraSlotRequirement() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4591,7 +4959,7 @@ public void Example_Batch_CreateTask_CreatesATaskWithExtraSlotRequirement() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTask_CreatesATaskWithExtraSlotRequirement_Async() + public async Task Example_BatchClient_CreateTask_CreatesATaskWithExtraSlotRequirement_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4610,7 +4978,7 @@ public async Task Example_Batch_CreateTask_CreatesATaskWithExtraSlotRequirement_ [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTask_CreatesATaskWithExtraSlotRequirement_Convenience() + public void Example_BatchClient_CreateTask_CreatesATaskWithExtraSlotRequirement_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4625,7 +4993,7 @@ public void Example_Batch_CreateTask_CreatesATaskWithExtraSlotRequirement_Conven [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTask_CreatesATaskWithExtraSlotRequirement_Convenience_Async() + public async Task Example_BatchClient_CreateTask_CreatesATaskWithExtraSlotRequirement_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4640,7 +5008,7 @@ public async Task Example_Batch_CreateTask_CreatesATaskWithExtraSlotRequirement_ [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTaskCollection_CreatesABasicCollectionOfTasks() + public void Example_BatchClient_CreateTaskCollection_CreatesABasicCollectionOfTasks() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4670,7 +5038,7 @@ public void Example_Batch_CreateTaskCollection_CreatesABasicCollectionOfTasks() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTaskCollection_CreatesABasicCollectionOfTasks_Async() + public async Task Example_BatchClient_CreateTaskCollection_CreatesABasicCollectionOfTasks_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4700,7 +5068,7 @@ public async Task Example_Batch_CreateTaskCollection_CreatesABasicCollectionOfTa [Test] 
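// Illustrative sketch, not taken from this file: the basic task-collection protocol samples above
// send a "value" array of task definitions, assuming the same RequestContent-based
// CreateTaskCollection(jobId, content) overload pattern that the CreateTask protocol samples use.
// The wrapping property name follows the Batch add-task-collection REST schema; the ids and command
// line are placeholders.
//
//   using RequestContent body = RequestContent.Create(new
//   {
//       value = new object[]
//       {
//           new { id = "simple1", commandLine = "cmd /c dir /s" },
//           new { id = "simple2", commandLine = "cmd /c dir /s" },
//       },
//   });
//   Response response = client.CreateTaskCollection("jobId", body);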
[Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTaskCollection_CreatesABasicCollectionOfTasks_Convenience() + public void Example_BatchClient_CreateTaskCollection_CreatesABasicCollectionOfTasks_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4716,7 +5084,7 @@ public void Example_Batch_CreateTaskCollection_CreatesABasicCollectionOfTasks_Co [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTaskCollection_CreatesABasicCollectionOfTasks_Convenience_Async() + public async Task Example_BatchClient_CreateTaskCollection_CreatesABasicCollectionOfTasks_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4732,7 +5100,7 @@ public async Task Example_Batch_CreateTaskCollection_CreatesABasicCollectionOfTa [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTaskCollection_CreatesAComplexCollectionOfTasks() + public void Example_BatchClient_CreateTaskCollection_CreatesAComplexCollectionOfTasks() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4807,7 +5175,7 @@ public void Example_Batch_CreateTaskCollection_CreatesAComplexCollectionOfTasks( [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTaskCollection_CreatesAComplexCollectionOfTasks_Async() + public async Task Example_BatchClient_CreateTaskCollection_CreatesAComplexCollectionOfTasks_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4882,7 +5250,7 @@ public async Task Example_Batch_CreateTaskCollection_CreatesAComplexCollectionOf [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateTaskCollection_CreatesAComplexCollectionOfTasks_Convenience() + public void Example_BatchClient_CreateTaskCollection_CreatesAComplexCollectionOfTasks_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4929,7 +5297,7 @@ public void Example_Batch_CreateTaskCollection_CreatesAComplexCollectionOfTasks_ [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateTaskCollection_CreatesAComplexCollectionOfTasks_Convenience_Async() + public async Task Example_BatchClient_CreateTaskCollection_CreatesAComplexCollectionOfTasks_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4976,7 +5344,7 @@ public async Task Example_Batch_CreateTaskCollection_CreatesAComplexCollectionOf [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DeleteTask_TaskDelete() + public void Example_BatchClient_DeleteTask_TaskDelete() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -4989,7 +5357,7 @@ public void Example_Batch_DeleteTask_TaskDelete() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DeleteTask_TaskDelete_Async() + public async Task Example_BatchClient_DeleteTask_TaskDelete_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5002,7 +5370,7 @@ public async Task Example_Batch_DeleteTask_TaskDelete_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetTask_TaskGet() + public void 
Example_BatchClient_GetTask_TaskGet() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5016,7 +5384,7 @@ public void Example_Batch_GetTask_TaskGet() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetTask_TaskGet_Async() + public async Task Example_BatchClient_GetTask_TaskGet_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5030,7 +5398,7 @@ public async Task Example_Batch_GetTask_TaskGet_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetTask_TaskGet_Convenience() + public void Example_BatchClient_GetTask_TaskGet_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5041,7 +5409,7 @@ public void Example_Batch_GetTask_TaskGet_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetTask_TaskGet_Convenience_Async() + public async Task Example_BatchClient_GetTask_TaskGet_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5052,7 +5420,7 @@ public async Task Example_Batch_GetTask_TaskGet_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplaceTask_TaskUpdate() + public void Example_BatchClient_ReplaceTask_TaskUpdate() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5074,7 +5442,7 @@ public void Example_Batch_ReplaceTask_TaskUpdate() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplaceTask_TaskUpdate_Async() + public async Task Example_BatchClient_ReplaceTask_TaskUpdate_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5096,7 +5464,7 @@ public async Task Example_Batch_ReplaceTask_TaskUpdate_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplaceTask_TaskUpdate_Convenience() + public void Example_BatchClient_ReplaceTask_TaskUpdate_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5116,7 +5484,7 @@ public void Example_Batch_ReplaceTask_TaskUpdate_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplaceTask_TaskUpdate_Convenience_Async() + public async Task Example_BatchClient_ReplaceTask_TaskUpdate_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5136,7 +5504,7 @@ public async Task Example_Batch_ReplaceTask_TaskUpdate_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_TerminateTask_TaskTerminate() + public void Example_BatchClient_TerminateTask_TaskTerminate() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5149,7 +5517,7 @@ public void Example_Batch_TerminateTask_TaskTerminate() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_TerminateTask_TaskTerminate_Async() + public async Task Example_BatchClient_TerminateTask_TaskTerminate_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5162,7 +5530,7 @@ public async Task Example_Batch_TerminateTask_TaskTerminate_Async() [Test] [Ignore("Only validating compilation of examples")] - public void 
Example_Batch_ReactivateTask_TaskReactivate() + public void Example_BatchClient_ReactivateTask_TaskReactivate() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5175,7 +5543,7 @@ public void Example_Batch_ReactivateTask_TaskReactivate() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReactivateTask_TaskReactivate_Async() + public async Task Example_BatchClient_ReactivateTask_TaskReactivate_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5188,7 +5556,7 @@ public async Task Example_Batch_ReactivateTask_TaskReactivate_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DeleteTaskFile_FileDeleteFromTask() + public void Example_BatchClient_DeleteTaskFile_FileDeleteFromTask() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5201,7 +5569,7 @@ public void Example_Batch_DeleteTaskFile_FileDeleteFromTask() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DeleteTaskFile_FileDeleteFromTask_Async() + public async Task Example_BatchClient_DeleteTaskFile_FileDeleteFromTask_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5214,7 +5582,7 @@ public async Task Example_Batch_DeleteTaskFile_FileDeleteFromTask_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetTaskFile_GetFileFromTask() + public void Example_BatchClient_GetTaskFile_GetFileFromTask() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5228,7 +5596,7 @@ public void Example_Batch_GetTaskFile_GetFileFromTask() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetTaskFile_GetFileFromTask_Async() + public async Task Example_BatchClient_GetTaskFile_GetFileFromTask_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5242,7 +5610,7 @@ public async Task Example_Batch_GetTaskFile_GetFileFromTask_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetTaskFile_GetFileFromTask_Convenience() + public void Example_BatchClient_GetTaskFile_GetFileFromTask_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5253,7 +5621,7 @@ public void Example_Batch_GetTaskFile_GetFileFromTask_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetTaskFile_GetFileFromTask_Convenience_Async() + public async Task Example_BatchClient_GetTaskFile_GetFileFromTask_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5264,7 +5632,7 @@ public async Task Example_Batch_GetTaskFile_GetFileFromTask_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateNodeUser_NodeCreateUser() + public void Example_BatchClient_CreateNodeUser_NodeCreateUser() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5284,7 +5652,7 @@ public void Example_Batch_CreateNodeUser_NodeCreateUser() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateNodeUser_NodeCreateUser_Async() + public async Task Example_BatchClient_CreateNodeUser_NodeCreateUser_Async() { Uri endpoint = 
new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5304,7 +5672,7 @@ public async Task Example_Batch_CreateNodeUser_NodeCreateUser_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_CreateNodeUser_NodeCreateUser_Convenience() + public void Example_BatchClient_CreateNodeUser_NodeCreateUser_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5321,7 +5689,7 @@ public void Example_Batch_CreateNodeUser_NodeCreateUser_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_CreateNodeUser_NodeCreateUser_Convenience_Async() + public async Task Example_BatchClient_CreateNodeUser_NodeCreateUser_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5338,7 +5706,7 @@ public async Task Example_Batch_CreateNodeUser_NodeCreateUser_Convenience_Async( [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DeleteNodeUser_NodeDeleteUser() + public void Example_BatchClient_DeleteNodeUser_NodeDeleteUser() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5351,7 +5719,7 @@ public void Example_Batch_DeleteNodeUser_NodeDeleteUser() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DeleteNodeUser_NodeDeleteUser_Async() + public async Task Example_BatchClient_DeleteNodeUser_NodeDeleteUser_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5364,7 +5732,7 @@ public async Task Example_Batch_DeleteNodeUser_NodeDeleteUser_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplaceNodeUser_NodeUpdateUser() + public void Example_BatchClient_ReplaceNodeUser_NodeUpdateUser() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5382,7 +5750,7 @@ public void Example_Batch_ReplaceNodeUser_NodeUpdateUser() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplaceNodeUser_NodeUpdateUser_Async() + public async Task Example_BatchClient_ReplaceNodeUser_NodeUpdateUser_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5400,7 +5768,7 @@ public async Task Example_Batch_ReplaceNodeUser_NodeUpdateUser_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_ReplaceNodeUser_NodeUpdateUser_Convenience() + public void Example_BatchClient_ReplaceNodeUser_NodeUpdateUser_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5416,7 +5784,7 @@ public void Example_Batch_ReplaceNodeUser_NodeUpdateUser_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_ReplaceNodeUser_NodeUpdateUser_Convenience_Async() + public async Task Example_BatchClient_ReplaceNodeUser_NodeUpdateUser_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5432,7 +5800,7 @@ public async Task Example_Batch_ReplaceNodeUser_NodeUpdateUser_Convenience_Async [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNode_NodeGet() + public void Example_BatchClient_GetNode_NodeGet() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5446,7 
+5814,7 @@ public void Example_Batch_GetNode_NodeGet() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNode_NodeGet_Async() + public async Task Example_BatchClient_GetNode_NodeGet_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5460,7 +5828,7 @@ public async Task Example_Batch_GetNode_NodeGet_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNode_NodeGet_Convenience() + public void Example_BatchClient_GetNode_NodeGet_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5471,7 +5839,7 @@ public void Example_Batch_GetNode_NodeGet_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNode_NodeGet_Convenience_Async() + public async Task Example_BatchClient_GetNode_NodeGet_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5482,7 +5850,7 @@ public async Task Example_Batch_GetNode_NodeGet_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_RebootNode_NodeReboot() + public void Example_BatchClient_RebootNode_NodeReboot() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5496,7 +5864,7 @@ public void Example_Batch_RebootNode_NodeReboot() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_RebootNode_NodeReboot_Async() + public async Task Example_BatchClient_RebootNode_NodeReboot_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5510,7 +5878,7 @@ public async Task Example_Batch_RebootNode_NodeReboot_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_RebootNode_NodeReboot_Convenience() + public void Example_BatchClient_RebootNode_NodeReboot_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5521,7 +5889,7 @@ public void Example_Batch_RebootNode_NodeReboot_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_RebootNode_NodeReboot_Convenience_Async() + public async Task Example_BatchClient_RebootNode_NodeReboot_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5532,7 +5900,133 @@ public async Task Example_Batch_RebootNode_NodeReboot_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DisableNodeScheduling_NodeDisableScheduling() + public void Example_BatchClient_StartNode_NodeStart() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + Response response = client.StartNode("poolId", "tvm-1695681911_1-20161122t193202z"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_BatchClient_StartNode_NodeStart_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + Response response = await client.StartNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating 
compilation of examples")] + public void Example_BatchClient_ReimageNode_NodeReimage() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + using RequestContent content = null; + Response response = client.ReimageNode("poolId", "tvm-1695681911_1-20161122t193202z", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_BatchClient_ReimageNode_NodeReimage_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + using RequestContent content = null; + Response response = await client.ReimageNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_BatchClient_ReimageNode_NodeReimage_Convenience() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + Response response = client.ReimageNode("poolId", "tvm-1695681911_1-20161122t193202z"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_BatchClient_ReimageNode_NodeReimage_Convenience_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + Response response = await client.ReimageNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_BatchClient_DeallocateNode_NodeDeallocate() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + using RequestContent content = null; + Response response = client.DeallocateNode("poolId", "tvm-1695681911_1-20161122t193202z", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_BatchClient_DeallocateNode_NodeDeallocate_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + using RequestContent content = null; + Response response = await client.DeallocateNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_BatchClient_DeallocateNode_NodeDeallocate_Convenience() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + Response response = client.DeallocateNode("poolId", "tvm-1695681911_1-20161122t193202z"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_BatchClient_DeallocateNode_NodeDeallocate_Convenience_Async() + { + Uri endpoint = new Uri(""); + TokenCredential credential = new DefaultAzureCredential(); + BatchClient client = new BatchClient(endpoint, credential); + + Response response = await client.DeallocateNodeAsync("poolId", "tvm-1695681911_1-20161122t193202z"); + } + + [Test] + [Ignore("Only validating 
compilation of examples")] + public void Example_BatchClient_DisableNodeScheduling_NodeDisableScheduling() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5546,7 +6040,7 @@ public void Example_Batch_DisableNodeScheduling_NodeDisableScheduling() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DisableNodeScheduling_NodeDisableScheduling_Async() + public async Task Example_BatchClient_DisableNodeScheduling_NodeDisableScheduling_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5560,7 +6054,7 @@ public async Task Example_Batch_DisableNodeScheduling_NodeDisableScheduling_Asyn [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DisableNodeScheduling_NodeDisableScheduling_Convenience() + public void Example_BatchClient_DisableNodeScheduling_NodeDisableScheduling_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5571,7 +6065,7 @@ public void Example_Batch_DisableNodeScheduling_NodeDisableScheduling_Convenienc [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DisableNodeScheduling_NodeDisableScheduling_Convenience_Async() + public async Task Example_BatchClient_DisableNodeScheduling_NodeDisableScheduling_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5582,7 +6076,7 @@ public async Task Example_Batch_DisableNodeScheduling_NodeDisableScheduling_Conv [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_EnableNodeScheduling_NodeEnableScheduling() + public void Example_BatchClient_EnableNodeScheduling_NodeEnableScheduling() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5595,7 +6089,7 @@ public void Example_Batch_EnableNodeScheduling_NodeEnableScheduling() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_EnableNodeScheduling_NodeEnableScheduling_Async() + public async Task Example_BatchClient_EnableNodeScheduling_NodeEnableScheduling_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5608,7 +6102,7 @@ public async Task Example_Batch_EnableNodeScheduling_NodeEnableScheduling_Async( [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings() + public void Example_BatchClient_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5623,7 +6117,7 @@ public void Example_Batch_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings( [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings_Async() + public async Task Example_BatchClient_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5638,7 +6132,7 @@ public async Task Example_Batch_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSet [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings_Convenience() + public void Example_BatchClient_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings_Convenience() { Uri 
endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5649,7 +6143,7 @@ public void Example_Batch_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings_ [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings_Convenience_Async() + public async Task Example_BatchClient_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSettings_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5660,7 +6154,7 @@ public async Task Example_Batch_GetNodeRemoteLoginSettings_NodeGetRemoteLoginSet [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_UploadNodeLogs_UploadBatchServiceLogs() + public void Example_BatchClient_UploadNodeLogs_UploadBatchServiceLogs() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5680,7 +6174,7 @@ public void Example_Batch_UploadNodeLogs_UploadBatchServiceLogs() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_UploadNodeLogs_UploadBatchServiceLogs_Async() + public async Task Example_BatchClient_UploadNodeLogs_UploadBatchServiceLogs_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5700,7 +6194,7 @@ public async Task Example_Batch_UploadNodeLogs_UploadBatchServiceLogs_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_UploadNodeLogs_UploadBatchServiceLogs_Convenience() + public void Example_BatchClient_UploadNodeLogs_UploadBatchServiceLogs_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5712,7 +6206,7 @@ public void Example_Batch_UploadNodeLogs_UploadBatchServiceLogs_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_UploadNodeLogs_UploadBatchServiceLogs_Convenience_Async() + public async Task Example_BatchClient_UploadNodeLogs_UploadBatchServiceLogs_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5724,7 +6218,7 @@ public async Task Example_Batch_UploadNodeLogs_UploadBatchServiceLogs_Convenienc [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeExtension_GetBatchNodeExtension() + public void Example_BatchClient_GetNodeExtension_GetBatchNodeExtension() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5738,7 +6232,7 @@ public void Example_Batch_GetNodeExtension_GetBatchNodeExtension() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodeExtension_GetBatchNodeExtension_Async() + public async Task Example_BatchClient_GetNodeExtension_GetBatchNodeExtension_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5752,7 +6246,7 @@ public async Task Example_Batch_GetNodeExtension_GetBatchNodeExtension_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeExtension_GetBatchNodeExtension_Convenience() + public void Example_BatchClient_GetNodeExtension_GetBatchNodeExtension_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5763,7 +6257,7 @@ public void Example_Batch_GetNodeExtension_GetBatchNodeExtension_Convenience() [Test] [Ignore("Only validating 
compilation of examples")] - public async Task Example_Batch_GetNodeExtension_GetBatchNodeExtension_Convenience_Async() + public async Task Example_BatchClient_GetNodeExtension_GetBatchNodeExtension_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5774,7 +6268,7 @@ public async Task Example_Batch_GetNodeExtension_GetBatchNodeExtension_Convenien [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_DeleteNodeFile_FileDeleteFromNode() + public void Example_BatchClient_DeleteNodeFile_FileDeleteFromNode() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5787,7 +6281,7 @@ public void Example_Batch_DeleteNodeFile_FileDeleteFromNode() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_DeleteNodeFile_FileDeleteFromNode_Async() + public async Task Example_BatchClient_DeleteNodeFile_FileDeleteFromNode_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5800,7 +6294,7 @@ public async Task Example_Batch_DeleteNodeFile_FileDeleteFromNode_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeFile_GetFileFromComputeNode() + public void Example_BatchClient_GetNodeFile_GetFileFromComputeNode() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5814,7 +6308,7 @@ public void Example_Batch_GetNodeFile_GetFileFromComputeNode() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodeFile_GetFileFromComputeNode_Async() + public async Task Example_BatchClient_GetNodeFile_GetFileFromComputeNode_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5828,7 +6322,7 @@ public async Task Example_Batch_GetNodeFile_GetFileFromComputeNode_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeFile_GetFileFromComputeNode_Convenience() + public void Example_BatchClient_GetNodeFile_GetFileFromComputeNode_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5839,7 +6333,7 @@ public void Example_Batch_GetNodeFile_GetFileFromComputeNode_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodeFile_GetFileFromComputeNode_Convenience_Async() + public async Task Example_BatchClient_GetNodeFile_GetFileFromComputeNode_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5850,7 +6344,7 @@ public async Task Example_Batch_GetNodeFile_GetFileFromComputeNode_Convenience_A [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetApplications_ListApplications() + public void Example_BatchClient_GetApplications_ListApplications() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5867,7 +6361,7 @@ public void Example_Batch_GetApplications_ListApplications() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetApplications_ListApplications_Async() + public async Task Example_BatchClient_GetApplications_ListApplications_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5884,7 +6378,7 @@ public async Task Example_Batch_GetApplications_ListApplications_Async() 
[Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetApplications_ListApplications_Convenience() + public void Example_BatchClient_GetApplications_ListApplications_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5897,7 +6391,7 @@ public void Example_Batch_GetApplications_ListApplications_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetApplications_ListApplications_Convenience_Async() + public async Task Example_BatchClient_GetApplications_ListApplications_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5910,7 +6404,7 @@ public async Task Example_Batch_GetApplications_ListApplications_Convenience_Asy [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPoolUsageMetrics_PoolListUsageMetrics() + public void Example_BatchClient_GetPoolUsageMetrics_PoolListUsageMetrics() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5929,7 +6423,7 @@ public void Example_Batch_GetPoolUsageMetrics_PoolListUsageMetrics() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPoolUsageMetrics_PoolListUsageMetrics_Async() + public async Task Example_BatchClient_GetPoolUsageMetrics_PoolListUsageMetrics_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5948,7 +6442,7 @@ public async Task Example_Batch_GetPoolUsageMetrics_PoolListUsageMetrics_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPoolUsageMetrics_PoolListUsageMetrics_Convenience() + public void Example_BatchClient_GetPoolUsageMetrics_PoolListUsageMetrics_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5961,7 +6455,7 @@ public void Example_Batch_GetPoolUsageMetrics_PoolListUsageMetrics_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPoolUsageMetrics_PoolListUsageMetrics_Convenience_Async() + public async Task Example_BatchClient_GetPoolUsageMetrics_PoolListUsageMetrics_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5974,7 +6468,7 @@ public async Task Example_Batch_GetPoolUsageMetrics_PoolListUsageMetrics_Conveni [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPools_PoolList() + public void Example_BatchClient_GetPools_PoolList() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -5989,7 +6483,7 @@ public void Example_Batch_GetPools_PoolList() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPools_PoolList_Async() + public async Task Example_BatchClient_GetPools_PoolList_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6004,7 +6498,7 @@ public async Task Example_Batch_GetPools_PoolList_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPools_PoolList_Convenience() + public void Example_BatchClient_GetPools_PoolList_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6017,7 +6511,7 @@ public void Example_Batch_GetPools_PoolList_Convenience() [Test] 
[Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPools_PoolList_Convenience_Async() + public async Task Example_BatchClient_GetPools_PoolList_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6030,7 +6524,7 @@ public async Task Example_Batch_GetPools_PoolList_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetSupportedImages_AccountListNodeAgentSkus() + public void Example_BatchClient_GetSupportedImages_AccountListNodeAgentSkus() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6048,7 +6542,7 @@ public void Example_Batch_GetSupportedImages_AccountListNodeAgentSkus() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetSupportedImages_AccountListNodeAgentSkus_Async() + public async Task Example_BatchClient_GetSupportedImages_AccountListNodeAgentSkus_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6066,7 +6560,7 @@ public async Task Example_Batch_GetSupportedImages_AccountListNodeAgentSkus_Asyn [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetSupportedImages_AccountListNodeAgentSkus_Convenience() + public void Example_BatchClient_GetSupportedImages_AccountListNodeAgentSkus_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6079,7 +6573,7 @@ public void Example_Batch_GetSupportedImages_AccountListNodeAgentSkus_Convenienc [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetSupportedImages_AccountListNodeAgentSkus_Convenience_Async() + public async Task Example_BatchClient_GetSupportedImages_AccountListNodeAgentSkus_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6092,7 +6586,7 @@ public async Task Example_Batch_GetSupportedImages_AccountListNodeAgentSkus_Conv [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPoolNodeCounts_NodeCountsPayload() + public void Example_BatchClient_GetPoolNodeCounts_NodeCountsPayload() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6107,7 +6601,7 @@ public void Example_Batch_GetPoolNodeCounts_NodeCountsPayload() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPoolNodeCounts_NodeCountsPayload_Async() + public async Task Example_BatchClient_GetPoolNodeCounts_NodeCountsPayload_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6122,7 +6616,7 @@ public async Task Example_Batch_GetPoolNodeCounts_NodeCountsPayload_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetPoolNodeCounts_NodeCountsPayload_Convenience() + public void Example_BatchClient_GetPoolNodeCounts_NodeCountsPayload_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6135,7 +6629,7 @@ public void Example_Batch_GetPoolNodeCounts_NodeCountsPayload_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetPoolNodeCounts_NodeCountsPayload_Convenience_Async() + public async Task Example_BatchClient_GetPoolNodeCounts_NodeCountsPayload_Convenience_Async() { Uri endpoint = new Uri(""); 
TokenCredential credential = new DefaultAzureCredential(); @@ -6148,7 +6642,7 @@ public async Task Example_Batch_GetPoolNodeCounts_NodeCountsPayload_Convenience_ [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobs_JobList() + public void Example_BatchClient_GetJobs_JobList() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6163,7 +6657,7 @@ public void Example_Batch_GetJobs_JobList() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobs_JobList_Async() + public async Task Example_BatchClient_GetJobs_JobList_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6178,7 +6672,7 @@ public async Task Example_Batch_GetJobs_JobList_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobs_JobList_Convenience() + public void Example_BatchClient_GetJobs_JobList_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6191,7 +6685,7 @@ public void Example_Batch_GetJobs_JobList_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobs_JobList_Convenience_Async() + public async Task Example_BatchClient_GetJobs_JobList_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6204,7 +6698,7 @@ public async Task Example_Batch_GetJobs_JobList_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobsFromSchedules_ListJobUnderJobSchedule() + public void Example_BatchClient_GetJobsFromSchedules_ListJobUnderJobSchedule() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6219,7 +6713,7 @@ public void Example_Batch_GetJobsFromSchedules_ListJobUnderJobSchedule() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobsFromSchedules_ListJobUnderJobSchedule_Async() + public async Task Example_BatchClient_GetJobsFromSchedules_ListJobUnderJobSchedule_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6234,7 +6728,7 @@ public async Task Example_Batch_GetJobsFromSchedules_ListJobUnderJobSchedule_Asy [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobsFromSchedules_ListJobUnderJobSchedule_Convenience() + public void Example_BatchClient_GetJobsFromSchedules_ListJobUnderJobSchedule_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6247,7 +6741,7 @@ public void Example_Batch_GetJobsFromSchedules_ListJobUnderJobSchedule_Convenien [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobsFromSchedules_ListJobUnderJobSchedule_Convenience_Async() + public async Task Example_BatchClient_GetJobsFromSchedules_ListJobUnderJobSchedule_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6260,7 +6754,7 @@ public async Task Example_Batch_GetJobsFromSchedules_ListJobUnderJobSchedule_Con [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobPreparationAndReleaseTaskStatuses_JobListPreparationAndReleaseTaskStatus() + public void Example_BatchClient_GetJobPreparationAndReleaseTaskStatuses_JobListPreparationAndReleaseTaskStatus() { 
Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6275,7 +6769,7 @@ public void Example_Batch_GetJobPreparationAndReleaseTaskStatuses_JobListPrepara [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobPreparationAndReleaseTaskStatuses_JobListPreparationAndReleaseTaskStatus_Async() + public async Task Example_BatchClient_GetJobPreparationAndReleaseTaskStatuses_JobListPreparationAndReleaseTaskStatus_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6290,7 +6784,7 @@ public async Task Example_Batch_GetJobPreparationAndReleaseTaskStatuses_JobListP [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobPreparationAndReleaseTaskStatuses_JobListPreparationAndReleaseTaskStatus_Convenience() + public void Example_BatchClient_GetJobPreparationAndReleaseTaskStatuses_JobListPreparationAndReleaseTaskStatus_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6303,7 +6797,7 @@ public void Example_Batch_GetJobPreparationAndReleaseTaskStatuses_JobListPrepara [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobPreparationAndReleaseTaskStatuses_JobListPreparationAndReleaseTaskStatus_Convenience_Async() + public async Task Example_BatchClient_GetJobPreparationAndReleaseTaskStatuses_JobListPreparationAndReleaseTaskStatus_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6316,7 +6810,7 @@ public async Task Example_Batch_GetJobPreparationAndReleaseTaskStatuses_JobListP [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobSchedules_JobScheduleList() + public void Example_BatchClient_GetJobSchedules_JobScheduleList() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6331,7 +6825,7 @@ public void Example_Batch_GetJobSchedules_JobScheduleList() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobSchedules_JobScheduleList_Async() + public async Task Example_BatchClient_GetJobSchedules_JobScheduleList_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6346,7 +6840,7 @@ public async Task Example_Batch_GetJobSchedules_JobScheduleList_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetJobSchedules_JobScheduleList_Convenience() + public void Example_BatchClient_GetJobSchedules_JobScheduleList_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6359,7 +6853,7 @@ public void Example_Batch_GetJobSchedules_JobScheduleList_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetJobSchedules_JobScheduleList_Convenience_Async() + public async Task Example_BatchClient_GetJobSchedules_JobScheduleList_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6372,7 +6866,7 @@ public async Task Example_Batch_GetJobSchedules_JobScheduleList_Convenience_Asyn [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetTasks_TaskList() + public void Example_BatchClient_GetTasks_TaskList() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6387,7 
+6881,7 @@ public void Example_Batch_GetTasks_TaskList() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetTasks_TaskList_Async() + public async Task Example_BatchClient_GetTasks_TaskList_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6402,7 +6896,7 @@ public async Task Example_Batch_GetTasks_TaskList_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetTasks_TaskList_Convenience() + public void Example_BatchClient_GetTasks_TaskList_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6415,7 +6909,7 @@ public void Example_Batch_GetTasks_TaskList_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetTasks_TaskList_Convenience_Async() + public async Task Example_BatchClient_GetTasks_TaskList_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6428,7 +6922,7 @@ public async Task Example_Batch_GetTasks_TaskList_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetSubTasks_TaskListSubtasks() + public void Example_BatchClient_GetSubTasks_TaskListSubtasks() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6443,7 +6937,7 @@ public void Example_Batch_GetSubTasks_TaskListSubtasks() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetSubTasks_TaskListSubtasks_Async() + public async Task Example_BatchClient_GetSubTasks_TaskListSubtasks_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6458,7 +6952,7 @@ public async Task Example_Batch_GetSubTasks_TaskListSubtasks_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetSubTasks_TaskListSubtasks_Convenience() + public void Example_BatchClient_GetSubTasks_TaskListSubtasks_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6471,7 +6965,7 @@ public void Example_Batch_GetSubTasks_TaskListSubtasks_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetSubTasks_TaskListSubtasks_Convenience_Async() + public async Task Example_BatchClient_GetSubTasks_TaskListSubtasks_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6484,7 +6978,7 @@ public async Task Example_Batch_GetSubTasks_TaskListSubtasks_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetTaskFiles_FileListFromTask() + public void Example_BatchClient_GetTaskFiles_FileListFromTask() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6499,7 +6993,7 @@ public void Example_Batch_GetTaskFiles_FileListFromTask() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetTaskFiles_FileListFromTask_Async() + public async Task Example_BatchClient_GetTaskFiles_FileListFromTask_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6514,7 +7008,7 @@ public async Task Example_Batch_GetTaskFiles_FileListFromTask_Async() [Test] [Ignore("Only validating compilation of examples")] - public void 
Example_Batch_GetTaskFiles_FileListFromTask_Convenience() + public void Example_BatchClient_GetTaskFiles_FileListFromTask_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6527,7 +7021,7 @@ public void Example_Batch_GetTaskFiles_FileListFromTask_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetTaskFiles_FileListFromTask_Convenience_Async() + public async Task Example_BatchClient_GetTaskFiles_FileListFromTask_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6540,7 +7034,7 @@ public async Task Example_Batch_GetTaskFiles_FileListFromTask_Convenience_Async( [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodes_NodeList() + public void Example_BatchClient_GetNodes_NodeList() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6555,7 +7049,7 @@ public void Example_Batch_GetNodes_NodeList() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodes_NodeList_Async() + public async Task Example_BatchClient_GetNodes_NodeList_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6570,7 +7064,7 @@ public async Task Example_Batch_GetNodes_NodeList_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodes_NodeList_Convenience() + public void Example_BatchClient_GetNodes_NodeList_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6583,7 +7077,7 @@ public void Example_Batch_GetNodes_NodeList_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodes_NodeList_Convenience_Async() + public async Task Example_BatchClient_GetNodes_NodeList_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6596,7 +7090,7 @@ public async Task Example_Batch_GetNodes_NodeList_Convenience_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeExtensions_ListComputeNodeExtensions() + public void Example_BatchClient_GetNodeExtensions_ListComputeNodeExtensions() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6611,7 +7105,7 @@ public void Example_Batch_GetNodeExtensions_ListComputeNodeExtensions() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodeExtensions_ListComputeNodeExtensions_Async() + public async Task Example_BatchClient_GetNodeExtensions_ListComputeNodeExtensions_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6626,7 +7120,7 @@ public async Task Example_Batch_GetNodeExtensions_ListComputeNodeExtensions_Asyn [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeExtensions_ListComputeNodeExtensions_Convenience() + public void Example_BatchClient_GetNodeExtensions_ListComputeNodeExtensions_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6639,7 +7133,7 @@ public void Example_Batch_GetNodeExtensions_ListComputeNodeExtensions_Convenienc [Test] [Ignore("Only validating compilation of examples")] - public async Task 
Example_Batch_GetNodeExtensions_ListComputeNodeExtensions_Convenience_Async() + public async Task Example_BatchClient_GetNodeExtensions_ListComputeNodeExtensions_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6652,7 +7146,7 @@ public async Task Example_Batch_GetNodeExtensions_ListComputeNodeExtensions_Conv [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeFiles_FileListFromNode() + public void Example_BatchClient_GetNodeFiles_FileListFromNode() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6667,7 +7161,7 @@ public void Example_Batch_GetNodeFiles_FileListFromNode() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodeFiles_FileListFromNode_Async() + public async Task Example_BatchClient_GetNodeFiles_FileListFromNode_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6682,7 +7176,7 @@ public async Task Example_Batch_GetNodeFiles_FileListFromNode_Async() [Test] [Ignore("Only validating compilation of examples")] - public void Example_Batch_GetNodeFiles_FileListFromNode_Convenience() + public void Example_BatchClient_GetNodeFiles_FileListFromNode_Convenience() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); @@ -6695,7 +7189,7 @@ public void Example_Batch_GetNodeFiles_FileListFromNode_Convenience() [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_Batch_GetNodeFiles_FileListFromNode_Convenience_Async() + public async Task Example_BatchClient_GetNodeFiles_FileListFromNode_Convenience_Async() { Uri endpoint = new Uri(""); TokenCredential credential = new DefaultAzureCredential(); diff --git a/sdk/batch/Azure.Compute.Batch/tsp-location.yaml b/sdk/batch/Azure.Compute.Batch/tsp-location.yaml index 399863ae4a83f..c435b8bf455ad 100644 --- a/sdk/batch/Azure.Compute.Batch/tsp-location.yaml +++ b/sdk/batch/Azure.Compute.Batch/tsp-location.yaml @@ -1,3 +1,4 @@ directory: specification/batch/Azure.Batch -commit: 9df71d5a717e4ed5e6728e7e6ba2fead60f62243 +commit: 8b74a1daf5240beafb6704a04d9a68dd96cbe7ee repo: Azure/azure-rest-api-specs +additionalDirectories:
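
For reference, the node lifecycle operations introduced by the regenerated samples above (StartNode, ReimageNode, DeallocateNode) compose as shown in the sketch below. This is illustrative only, not part of the generated samples: the endpoint URI is a placeholder you must replace with your own Batch account endpoint, the pool and node IDs are the placeholder values reused from the generated examples, and only the convenience async overloads that appear in the diff are exercised.

// Minimal sketch of the new BatchClient node operations, assuming a reachable Batch account.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Identity;
using Azure.Compute.Batch;

public static class NodeLifecycleSketch
{
    public static async Task RunAsync()
    {
        // Placeholder endpoint; substitute your Batch account endpoint.
        Uri endpoint = new Uri("https://<your-account>.<region>.batch.azure.com");
        BatchClient client = new BatchClient(endpoint, new DefaultAzureCredential());

        // Placeholder pool and node IDs, copied from the generated samples.
        string poolId = "poolId";
        string nodeId = "tvm-1695681911_1-20161122t193202z";

        // Start a stopped/deallocated node (convenience overload from the NodeStart samples).
        Response startResponse = await client.StartNodeAsync(poolId, nodeId);
        Console.WriteLine($"StartNode returned {startResponse.Status}");

        // Reinstall the operating system on the node (convenience overload from the NodeReimage samples).
        Response reimageResponse = await client.ReimageNodeAsync(poolId, nodeId);
        Console.WriteLine($"ReimageNode returned {reimageResponse.Status}");

        // Deallocate the node (convenience overload from the NodeDeallocate samples).
        Response deallocateResponse = await client.DeallocateNodeAsync(poolId, nodeId);
        Console.WriteLine($"DeallocateNode returned {deallocateResponse.Status}");
    }
}

As in the generated protocol samples, each operation also has a protocol overload that accepts a RequestContent body and returns the raw Response; the convenience overloads shown here simply omit that body.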