public void Bug1771070_1771072_JobAndPoolLifetimeStats()
{
    Action test = () =>
    {
        using (BatchClient batchCli = TestUtilities.OpenBatchClientAsync(TestUtilities.GetCredentialsFromEnvironment()).Result)
        {
            JobStatistics jobStatistics = batchCli.JobOperations.GetAllJobsLifetimeStatistics();
            PoolStatistics poolStatistics = batchCli.PoolOperations.GetAllPoolsLifetimeStatistics();

            Assert.NotNull(jobStatistics);
            Assert.NotNull(poolStatistics);

            // Since we cannot really validate that the stats returned by the service are correct,
            // the best we can do is make sure we get some back.
            // Dump a few properties from each stats bag to make sure they are populated.
            this.testOutputHelper.WriteLine("JobStatistics.StartTime: {0}", jobStatistics.StartTime);
            this.testOutputHelper.WriteLine("JobStatistics.LastUpdateTime: {0}", jobStatistics.LastUpdateTime);
            this.testOutputHelper.WriteLine("JobStatistics.SucceededTaskCount: {0}", jobStatistics.SucceededTaskCount);
            this.testOutputHelper.WriteLine("JobStatistics.UserCpuTime: {0}", jobStatistics.UserCpuTime);

            this.testOutputHelper.WriteLine("PoolStatistics.StartTime: {0}", poolStatistics.StartTime);
            this.testOutputHelper.WriteLine("PoolStatistics.LastUpdateTime: {0}", poolStatistics.LastUpdateTime);
            this.testOutputHelper.WriteLine("PoolStatistics.ResourceStatistics.AverageMemoryGiB: {0}", poolStatistics.ResourceStatistics.AverageMemoryGiB);
            this.testOutputHelper.WriteLine("PoolStatistics.UsageStatistics.DedicatedCoreTime: {0}", poolStatistics.UsageStatistics.DedicatedCoreTime);
        }
    };

    SynchronizationContextHelper.RunTest(test, TestTimeout);
}
/// <summary>
/// Gets lifetime summary statistics for all of the jobs in the specified account.
/// </summary>
/// <param name="context">The account to use.</param>
/// <param name="additionalBehaviors">Additional client behaviors to perform.</param>
public PSJobStatistics GetAllJobsLifetimeStatistics(BatchAccountContext context, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
    WriteVerbose(Resources.GetAllJobsLifetimeStatistics);

    JobOperations jobOperations = context.BatchOMClient.JobOperations;
    JobStatistics jobStatistics = jobOperations.GetAllJobsLifetimeStatistics(additionalBehaviors);

    PSJobStatistics psJobStatistics = new PSJobStatistics(jobStatistics);
    return psJobStatistics;
}
public async Task<JobServiceModel> ScheduleDeviceMethodAsync(
    string jobId,
    string queryCondition,
    MethodParameterServiceModel parameter,
    DateTimeOffset startTimeUtc,
    long maxExecutionTimeInSeconds)
{
    //var result = await this.jobClient.ScheduleDeviceMethodAsync(
    //    jobId, queryCondition,
    //    parameter.ToAzureModel(),
    //    startTimeUtc.DateTime,
    //    maxExecutionTimeInSeconds);

    // The query condition is expected to look like "deviceId in ['id1','id2',...]";
    // strip the prefix and deserialize the remainder as a JSON array of device ids.
    var deviceListString = queryCondition.Replace("deviceId in", "").Trim();
    var deviceList = JsonConvert.DeserializeObject<List<dynamic>>(deviceListString);

    var deviceModelList = new List<DeviceJobServiceModel>();
    foreach (var item in deviceList)
    {
        deviceModelList.Add(new DeviceJobServiceModel
        {
            DeviceId = item,
            Status = DeviceJobStatus.Scheduled,
            CreatedDateTimeUtc = DateTime.UtcNow
        });
    }

    var jobModel = new JobServiceModel
    {
        CreatedTimeUtc = DateTime.UtcNow,
        Devices = deviceModelList,
        Status = JobStatus.Scheduled,
        MethodParameter = parameter,
        // The original code set JobType.ScheduleUpdateTwin here; for a device-method job
        // that appears to be a copy/paste error, so the device-method job type is used.
        Type = JobType.ScheduleDeviceMethod,
        ResultStatistics = new JobStatistics
        {
            DeviceCount = deviceModelList.Count,
            SucceededCount = 0,
            FailedCount = 0,
            PendingCount = 0,
            RunningCount = 0
        }
    };

    var value = JsonConvert.SerializeObject(
        jobModel,
        Formatting.Indented,
        new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore });

    var result = await this.client.CreateAsync(DEVICE_JOBS_COLLECTION_ID, value);
    var job = this.CreatejobServiceModel(result);

    return job;
}
public async Task<JobServiceModel> ScheduleTwinUpdateAsync(
    string jobId,
    string queryCondition,
    TwinServiceModel twin,
    DateTimeOffset startTimeUtc,
    long maxExecutionTimeInSeconds)
{
    //var result = await this.jobClient.ScheduleTwinUpdateAsync(
    //    jobId,
    //    queryCondition,
    //    twin.ToAzureModel(),
    //    startTimeUtc.DateTime,
    //    maxExecutionTimeInSeconds);

    // The query condition is expected to look like "deviceId in ['id1','id2',...]";
    // strip the prefix and deserialize the remainder as a JSON array of device ids.
    var deviceListString = queryCondition.Replace("deviceId in", "").Trim();
    var deviceList = JsonConvert.DeserializeObject<List<dynamic>>(deviceListString);

    var deviceModelList = new List<DeviceJobServiceModel>();
    foreach (var item in deviceList)
    {
        deviceModelList.Add(new DeviceJobServiceModel
        {
            DeviceId = item,
            Status = DeviceJobStatus.Scheduled,
            CreatedDateTimeUtc = DateTime.UtcNow
        });
    }

    var jobModel = new JobServiceModel
    {
        CreatedTimeUtc = DateTime.UtcNow,
        Devices = deviceModelList,
        Status = JobStatus.Scheduled,
        UpdateTwin = twin,
        Type = JobType.ScheduleUpdateTwin,
        ResultStatistics = new JobStatistics
        {
            DeviceCount = deviceModelList.Count,
            SucceededCount = 0,
            FailedCount = 0,
            PendingCount = 0,
            RunningCount = 0
        }
    };

    var value = JsonConvert.SerializeObject(
        jobModel,
        Formatting.Indented,
        new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore });

    var result = await this.client.CreateAsync(DEVICE_JOBS_COLLECTION_ID, value);
    var job = this.CreatejobServiceModel(result);

    // Update the deviceProperties cache; no need to wait for the task to finish.
    var model = new DevicePropertyServiceModel();

    var tagRoot = JsonConvert.DeserializeObject(JsonConvert.SerializeObject(twin.Tags)) as JToken;
    if (tagRoot != null)
    {
        model.Tags = new HashSet<string>(tagRoot.GetAllLeavesPath());
    }

    var reportedRoot = JsonConvert.DeserializeObject(JsonConvert.SerializeObject(twin.ReportedProperties)) as JToken;
    if (reportedRoot != null)
    {
        model.Reported = new HashSet<string>(reportedRoot.GetAllLeavesPath());
    }

    var unused = deviceProperties.UpdateListAsync(model);

    return job;
}
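Both scheduling methods above build and store the job document locally instead of calling the IoT Hub job client. A minimal, hypothetical usage sketch follows; the jobs and twin variables, the device ids, and the one-hour timeout are illustrative assumptions, not part of the original code:

// Hypothetical caller; assumes "jobs" exposes the ScheduleTwinUpdateAsync method above and
// "twin" is an existing TwinServiceModel carrying the desired tag/property changes.
JobServiceModel job = await jobs.ScheduleTwinUpdateAsync(
    jobId: Guid.NewGuid().ToString(),
    // The condition must use the "deviceId in [...]" shape, because the method strips the
    // prefix and JSON-deserializes the remainder as the list of target device ids.
    queryCondition: "deviceId in ['device-01','device-02']",
    twin: twin,
    startTimeUtc: DateTimeOffset.UtcNow,
    maxExecutionTimeInSeconds: 3600);

Console.WriteLine("{0} devices scheduled, status: {1}", job.ResultStatistics.DeviceCount, job.Status);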
protected virtual async Task<Stream> Download(string uri)
{
    uri = uri + (uri.Contains("?") ? "&" : "?") + API_key;

    Stream stream = null;
    bool success = false;

    for (int retryCount = 0; retryCount < config.MaxDownloadRetries; retryCount++)
    {
        await concurrentRequestThrottle.WaitAsync(); // Limit entrance to MAX_CONCURRENT_DOWNLOADS
        await batchThrottle.WaitForBatch();          // Wait when we have reached the API max request limit

        bool wait = false; // Flag that lets us release concurrentRequestThrottle first, then wait for ERROR_DELAY
        try
        {
            JobStatistics?.IncrementTotalRequestCount(1);
            JobStatistics?.IncrementActiveRequestCount(1);

            var response = await httpClient.GetAsync(uri);
            if (response.StatusCode == HttpStatusCode.OK)
            {
                stream = await response.Content.ReadAsStreamAsync();
                success = true;
                break;
            }
            else if (response.StatusCode == HttpStatusCode.NotFound)
            {
                Logger.LogError("HttpStatusCode 404 received when accessing url: {uri}", uri);
                break;
            }
            else
            {
                int intCode = Convert.ToInt32(response.StatusCode);
                if (intCode == 429) // Max requests exceeded
                {
                    batchThrottle.BlockNow();
                }
                else
                {
                    Logger.LogError("HttpStatusCode {code} received when accessing url: {uri}", intCode, uri);
                    wait = true;
                }
            }
        }
        catch (Exception ex)
        {
            Logger.LogError(ex, "Exception while downloading url: {uri}", uri);
            wait = true;
        }
        finally
        {
            concurrentRequestThrottle.Release();
            JobStatistics?.IncrementActiveRequestCount(-1);
            if (wait)
            {
                await Task.Delay(config.ErrorDelay);
            }
        }
    }

    if (!success)
    {
        Logger.LogError("Max retry count exceeded when attempting to download url: {uri}", uri);
    }

    return stream;
}
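The Download method relies on two throttles whose implementations are not shown: concurrentRequestThrottle (used like a SemaphoreSlim via WaitAsync/Release) and batchThrottle (with WaitForBatch and BlockNow). The sketch below is only one plausible shape for the batch throttle, assuming its job is to hold all callers back for a cooldown period after an HTTP 429; the class name and cooldown behavior are assumptions, not the original implementation:

using System;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical batch throttle: lets requests through freely until BlockNow() is called,
// then makes every caller wait until the cooldown window has passed.
public class BatchThrottle
{
    private readonly TimeSpan cooldown;
    private readonly object gate = new object();
    private DateTime blockedUntilUtc = DateTime.MinValue;

    public BatchThrottle(TimeSpan cooldown) => this.cooldown = cooldown;

    // Called when the API reports that the request quota was exceeded (HTTP 429).
    public void BlockNow()
    {
        lock (gate)
        {
            blockedUntilUtc = DateTime.UtcNow + cooldown;
        }
    }

    // Await this before each request; completes immediately unless a block is active.
    public async Task WaitForBatch()
    {
        TimeSpan delay;
        lock (gate)
        {
            delay = blockedUntilUtc - DateTime.UtcNow;
        }

        if (delay > TimeSpan.Zero)
        {
            await Task.Delay(delay);
        }
    }
}

// The concurrency limit itself can simply be a SemaphoreSlim sized to the allowed parallel downloads:
// var concurrentRequestThrottle = new SemaphoreSlim(MAX_CONCURRENT_DOWNLOADS);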
private void ManagerAllThreadsCompleted()
{
    this.Manager.AllThreadsCompleted -= ManagerAllThreadsCompleted;
    this.Manager.RecordCompleted -= ManagerRecordCompleted;

    JobStatistics stats = new JobStatistics(this.Manager.ExecutionData.Values);
    bool hasErrors = (this.Statistics.FailureCount > 0);

    HistoryLog.RecordHistory(
        this.Context,
        String.Format("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9}",
            this.Statistics.ExecutionCount,
            this.Context.Execution.TestCountPerThreadStep,
            this.Context.Execution.ThreadCount,
            this.Statistics.RecordsInRun,
            this.Statistics.FailureCount,
            this.Statistics.AverageAITime(),
            stats.TotalExecutionTime,
            (this.Statistics.RecordsInRun / stats.TotalExecutionTime),
            stats.MaxProcessedRecords,
            stats.MinProcessedRecords));

    this.StatusUpdate?.Invoke(String.Format("Finished job with {0} threads", this.Context.Execution.ThreadCount));

    // Reset AI times and failure counts for next run
    this.Statistics.AITimes.Clear();
    this.Statistics.FailureCount = 0;

    if (this.Context.Execution.AutoScaling)
    {
        if (++this.Statistics.ExecutionCount < this.Context.Execution.TestCountPerThreadStep)
        {
            String errorText = String.Format("INTERNAL - Scaling up but no more threads left -> {0}.", this.Context.Execution.ThreadCount);

            if (!hasErrors)
            {
                this.ErrorTrackingAutoScale[SUCCESS] = this.Context.Execution.ThreadCount;

                if (this.ErrorTrackingAutoScale[FAILURE] != -1)
                {
                    errorText = String.Format("INTERNAL - Previous errors detected, not moving up thread count.");
                }
                else if (this.Context.Execution.ThreadCount < this.Context.Execution.MaxThreadCount)
                {
                    this.Context.Execution.ThreadCount += this.Context.Execution.AutoScaleIncrement;
                    errorText = String.Format("INTERNAL - Scaling up thread count");
                    ClientScalingLog.RecordScalingChange(this.Context, this.Context.Execution.ThreadCount);
                }
            }
            else // Something is wrong, scale back
            {
                this.StatusUpdate?.Invoke("Errors detected, scaling back");

                this.ErrorTrackingAutoScale[FAILURE] = this.Context.Execution.ThreadCount;
                errorText = String.Format("INTERNAL - Scaled back to a single thread already");

                if (this.Context.Execution.ThreadCount > 1)
                {
                    this.Context.Execution.ThreadCount -= 1;
                    errorText = String.Format("INTERNAL - Scaling back thread count with errors");
                }

                ClientScalingLog.RecordScalingChange(this.Context, this.Context.Execution.ThreadCount);
            }

            EventHubUtility.ProcessOneOff(this.Context.HubConfiguration, this.Context.Execution.ClientName, 2, errorText);
            this.StartExecution();
        }
        else
        {
            // Let caller know we're done.
            this.AllThreadsCompleted?.Invoke();
        }
    }
    else
    {
        if (++this.Statistics.ExecutionCount < this.Context.Execution.TestCountPerThreadStep)
        {
            this.StartExecution();
        }
        else
        {
            // Increase thread count until max
            if (this.Context.Execution.ThreadStep > 0 &&
                this.Context.Execution.ThreadCount + this.Context.Execution.ThreadStep <= this.Context.Execution.MaxThreadCount)
            {
                this.Statistics.ExecutionCount = 0;
                this.Context.Execution.ThreadCount += this.Context.Execution.ThreadStep;
                this.StartExecution();
            }
            else
            {
                this.AllThreadsCompleted?.Invoke();
            }
        }
    }
}