/// <summary>
/// Verifies that every Spark batch job returned by the list operation can also be
/// retrieved individually by its id, and that the two representations agree.
/// </summary>
public async Task TestGetSparkBatchJob()
{
    SparkBatchJobCollection listed = (await SparkBatchClient.GetSparkBatchJobsAsync()).Value;
    foreach (SparkBatchJob listedJob in listed.Sessions)
    {
        // Fetch the same job by id and compare it field-by-field with the listed entry.
        SparkBatchJob fetchedJob = await SparkBatchClient.GetSparkBatchJobAsync(listedJob.Id);
        ValidateSparkBatchJob(listedJob, fetchedJob);
    }
}
// Lists Spark batch jobs by sending a GET request through the HTTP pipeline and
// deserializing a 200 response body into a SparkBatchJobCollection.
// Parameters: @from = index of the first job to return; size = number of jobs to
// return; detailed = whether full job detail is requested (all optional);
// cancellationToken is flowed into the pipeline send and the JSON parse.
// NOTE(review): this snippet appears truncated — only the 200 case of the switch is
// visible; the non-200 branch and the method's closing braces are cut off here,
// so the error-path behavior cannot be verified from this view.
public async Task <Response <SparkBatchJobCollection> > GetSparkBatchJobsAsync(int? @from = null, int?size = null, bool?detailed = null, CancellationToken cancellationToken = default) { using var message = CreateGetSparkBatchJobsRequest(@from, size, detailed); await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); switch (message.Response.Status) { case 200: { SparkBatchJobCollection value = default; using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, cancellationToken).ConfigureAwait(false); value = SparkBatchJobCollection.DeserializeSparkBatchJobCollection(document.RootElement); return(Response.FromValue(value, message.Response)); }
/// <summary>
/// Retrieves all Spark batch jobs by paging through the service, advancing the
/// offset by the number of sessions each page reports, until a page comes back
/// smaller than the requested page size.
/// </summary>
/// <param name="client">Client used to query the Spark batch endpoint.</param>
/// <param name="detailed">Whether to request full detail for each job.</param>
/// <returns>All sessions accumulated across every page.</returns>
public static async Task <List <SparkBatchJob> > ListSparkBatchJobsAsync(SparkBatchClient client, bool detailed = true)
{
    const int pageSize = 20;
    var allJobs = new List<SparkBatchJob>();
    int offset = 0;
    while (true)
    {
        SparkBatchJobCollection page = (await client.GetSparkBatchJobsAsync(detailed: detailed, from: offset, size: pageSize)).Value;
        allJobs.AddRange(page.Sessions);
        int returned = page.Total;
        offset += returned;
        // A page shorter than the requested size means the last page was reached.
        if (returned != pageSize)
        {
            break;
        }
    }
    return allJobs;
}
/// <summary>
/// Verifies that every listed Spark batch job can be fetched individually by id.
/// A job can be removed by the service between the list call and the per-id get,
/// so a 404 on the individual fetch is tolerated; any other service error now
/// propagates and fails the test instead of being silently swallowed.
/// </summary>
public async Task TestGetSparkBatchJob()
{
    SparkBatchJobCollection sparkJobs = (await SparkBatchClient.GetSparkBatchJobsAsync()).Value;
    foreach (SparkBatchJob expectedSparkJob in sparkJobs.Sessions)
    {
        try
        {
            SparkBatchJob actualSparkJob = await SparkBatchClient.GetSparkBatchJobAsync(expectedSparkJob.Id);
            ValidateSparkBatchJob(expectedSparkJob, actualSparkJob);
        }
        catch (Azure.RequestFailedException ex) when (ex.Status == 404)
        {
            // The job disappeared between the list and the get; nothing left to validate.
        }
    }
}