/// <summary>
/// Run the job
/// </summary>
/// <returns>True when the job completes</returns>
public override bool Run()
{
    var progressWriter = new MultiThreadFileWriter(ProgressFilePath);

    Logger.LogInformation($"Checking {ProgressFilePath} existence...");

    // Load already processed items from the tracking file, if it exists
    var processedItems = new List<string>();
    if (File.Exists(ProgressFilePath))
    {
        Logger.LogInformation($"File {ProgressFilePath} detected! Resuming process from its last state");
        var lines = File.ReadAllLines(ProgressFilePath);
        processedItems = lines.ToList();
    }
    else
    {
        Logger.LogInformation($"File {ProgressFilePath} not detected! Starting process from scratch");
    }

    var jobName = GetName();
    var query = GetQuery(CallerId);
    query.PageInfo.Count = JobSettings.QueryRecordLimit;
    query.NoLock = true;

    var results = ProxiesPool.MainProxy.RetrieveAll(query);
    Logger.LogInformation($"Retrieved {results.Entities.Count} records from CRM");

    var processedItemCount = 0;
    var stopwatch = Stopwatch.StartNew();
    var data = PrepareData(results.Entities);
    var dataCount = data.Count();
    Logger.LogInformation($"{dataCount} records to process");

    var threads = (this.OverrideThreadNumber.HasValue) ? this.OverrideThreadNumber : JobSettings.ThreadNumber;
    var progressDisplayStep = (this.OverrideProgressDisplayStep.HasValue) ? this.OverrideProgressDisplayStep.Value : DefaultProgressDisplayStep;

    Parallel.ForEach(
        data,
        new ParallelOptions() { MaxDegreeOfParallelism = threads.Value },
        () =>
        {
            var proxy = ProxiesPool.GetProxy();
            return new { Proxy = proxy };
        },
        (item, loopState, context) =>
        {
            var jobExecutionContext = new JobExecutionContext(context.Proxy, item);
            jobExecutionContext.PushMetrics(base.ContextProperties);

            // Increment progress index
            Interlocked.Increment(ref processedItemCount);

            // Display progress every x records
            if (processedItemCount % progressDisplayStep == 0)
            {
                Logger.LogInformation($"Processing record {processedItemCount} / {dataCount}");
            }

            // Skip the record if it has already been processed
            if (processedItems.Contains(item.Id.ToString()))
            {
                return context;
            }

            try
            {
                ProcessRecord(jobExecutionContext);
                Logger.LogSuccess("Record processed with success!", jobExecutionContext.DumpMetrics());

                // Track job progress
                progressWriter.Write(item.Id.ToString());
            }
            catch (FaultException<OrganizationServiceFault> faultException)
            {
                var properties = jobExecutionContext.DumpMetrics().MergeWith(faultException.ExportProperties());
                Logger.LogFailure(faultException, properties);
            }
            catch (Exception ex)
            {
                Logger.LogFailure(ex, jobExecutionContext.DumpMetrics());
            }
            return context;
        },
        (context) =>
        {
            context.Proxy.Dispose();
        });
    stopwatch.Stop();

    var speed = Utilities.GetSpeed(stopwatch.Elapsed.TotalMilliseconds, results.Entities.Count);
    Logger.LogInformation($"{dataCount} records processed in {stopwatch.Elapsed.TotalSeconds}s ({stopwatch.Elapsed:g}) [Speed = {speed}]!");

    if (File.Exists(ProgressFilePath))
    {
        File.Delete(ProgressFilePath);
        Logger.LogInformation($"Progress file {ProgressFilePath} removed!");
    }
    return true;
}
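MultiThreadFileWriter is referenced above but not part of this listing. A minimal thread-safe sketch could look like the following; the real implementation may well buffer or queue writes instead of locking around each append.

// Sketch only: serializes concurrent appends with a lock.
// The actual MultiThreadFileWriter implementation is not shown in this listing.
public class MultiThreadFileWriter
{
    private readonly object _lock = new object();
    private readonly string _filePath;

    public MultiThreadFileWriter(string filePath)
    {
        _filePath = filePath;
    }

    // Append one line to the file; safe to call from multiple Parallel.ForEach workers
    public void Write(string line)
    {
        lock (_lock)
        {
            File.AppendAllText(_filePath, line + Environment.NewLine);
        }
    }
}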
/// <summary>
/// Apply record modification
/// </summary>
/// <param name="jobExecutionContext">Job execution context</param>
public virtual void ProcessRecord(JobExecutionContext jobExecutionContext) => throw new NotImplementedException();
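For illustration, a concrete job could override ProcessRecord like this. This is a minimal sketch: the Proxy and Record member names on JobExecutionContext are assumptions, since that class is not shown in this listing.

// Sketch of a concrete override: deactivate the current record.
// Assumption: JobExecutionContext exposes the CRM proxy as Proxy and the
// current record as Record (actual member names are not confirmed above).
public override void ProcessRecord(JobExecutionContext jobExecutionContext)
{
    var update = new Entity(jobExecutionContext.Record.LogicalName, jobExecutionContext.Record.Id);
    update["statecode"] = new OptionSetValue(1);  // 1 = Inactive for most entities
    update["statuscode"] = new OptionSetValue(2); // 2 = default inactive status
    jobExecutionContext.Proxy.Update(update);
}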
/// <summary>
/// Run the job
/// </summary>
/// <returns>True when no more data remains to process</returns>
public override bool Run()
{
    var jobName = GetName();
    var query = GetQuery(CallerId);
    query.TopCount = JobSettings.QueryRecordLimit;
    query.NoLock = true;
    // TopCount and PageInfo are mutually exclusive, so paging must be disabled
    query.PageInfo = null;

    var records = ProxiesPool.MainProxy.RetrieveMultiple(query).Entities;
    var entityName = query.EntityName;
    var startTime = DateTime.Now;

    // Initialize last result count to prevent an infinite loop
    int lastRunCount = JobSettings.QueryRecordLimit;

    var threads = (this.OverrideThreadNumber.HasValue) ? this.OverrideThreadNumber : JobSettings.ThreadNumber;

    int totalProcessed = 0;
    int totalSuccess = 0;
    int totalFailures = 0;

    while (records.Count > 0)
    {
        var stopwatch = Stopwatch.StartNew();
        Logger.LogInformation($"Retrieved {records.Count} records from CRM (Entity : {entityName})");

        int currentProcessed = 0;
        int currentSuccess = 0;
        int currentFailures = 0;

        Parallel.ForEach(
            records,
            new ParallelOptions() { MaxDegreeOfParallelism = threads.Value },
            () =>
            {
                var proxy = ProxiesPool.GetProxy();
                return new { Proxy = proxy };
            },
            (item, loopState, context) =>
            {
                var jobExecutionContext = new JobExecutionContext(context.Proxy, item);
                jobExecutionContext.PushMetrics(base.ContextProperties);

                try
                {
                    Interlocked.Increment(ref totalProcessed);
                    Interlocked.Increment(ref currentProcessed);

                    ProcessRecord(jobExecutionContext);

                    Interlocked.Increment(ref totalSuccess);
                    Interlocked.Increment(ref currentSuccess);
                    Logger.LogSuccess($"Record processed with success! (Entity : {entityName})", jobExecutionContext.DumpMetrics());
                }
                catch (FaultException<OrganizationServiceFault> faultException)
                {
                    var properties = jobExecutionContext.DumpMetrics().MergeWith(faultException.ExportProperties());
                    Interlocked.Increment(ref totalFailures);
                    Interlocked.Increment(ref currentFailures);
                    Logger.LogFailure(faultException, properties);
                }
                catch (Exception ex)
                {
                    Interlocked.Increment(ref totalFailures);
                    Interlocked.Increment(ref currentFailures);
                    Logger.LogFailure(ex, jobExecutionContext.DumpMetrics());
                }
                return context;
            },
            (context) =>
            {
                context.Proxy.Dispose();
            }
        );
        stopwatch.Stop();

        var speed = Utilities.GetSpeed(stopwatch.Elapsed.TotalMilliseconds, records.Count);
        Logger.LogInformation($"{currentProcessed} records (Entity : {entityName}) processed in {stopwatch.Elapsed.TotalSeconds}s ({stopwatch.Elapsed:g}) [Speed = {speed} | Success = {currentSuccess} | Failures = {currentFailures}]!");

        var duration = (DateTime.Now - startTime);
        var globalSpeed = Utilities.GetSpeed(duration.TotalMilliseconds, totalProcessed);
        Logger.LogInformation($"Total = {totalProcessed} records processed (Entity : {entityName}) in {duration:g}! [Speed = {globalSpeed} | Success = {totalSuccess} | Failures = {totalFailures}]");

        // If this round returned the same partial batch as the previous one,
        // the records are not dropping out of the query: stop to avoid an infinite loop
        if (lastRunCount < JobSettings.QueryRecordLimit && lastRunCount == records.Count)
        {
            Logger.LogInformation($"Operation completed! (Entity : {entityName} | Reason: Infinite loop detected)");
            return false;
        }

        // If the job duration reaches the execution limit, stop the process
        if (duration.TotalHours >= JobSettings.MaxRunDurationInHour)
        {
            Logger.LogInformation($"Operation completed! (Entity : {entityName} | Reason: Max duration reached)");
            return false;
        }

        // If every record in this round failed, stop
        if (currentFailures == records.Count)
        {
            Logger.LogInformation($"Operation failed! (Entity : {entityName} | Reason: Too many errors detected)");
            return false;
        }

        lastRunCount = records.Count;

        // Retrieve records for the next round
        records = ProxiesPool.MainProxy.RetrieveMultiple(query).Entities;
    }

    // The query returned nothing: the job is finished
    if (records.Count == 0)
    {
        Logger.LogInformation($"Operation completed! (Entity : {entityName} | Reason: No more data to process)");
        return true;
    }
    return false;
}
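The JobSettings members these jobs rely on suggest a shape along the following lines. This is inferred from usage only; the default values are purely illustrative.

// Sketch inferred from usage: QueryRecordLimit, ThreadNumber and
// MaxRunDurationInHour are the only members the Run() methods touch.
public class JobSettings
{
    // Maximum number of records fetched per query round (query.TopCount / PageInfo.Count)
    public int QueryRecordLimit { get; set; } = 250;

    // Degree of parallelism passed to Parallel.ForEach
    public int ThreadNumber { get; set; } = 4;

    // Safety limit: the looping job stops once this duration is exceeded
    public int MaxRunDurationInHour { get; set; } = 8;
}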
/// <summary>
/// Apply record modification
/// </summary>
/// <param name="context">Job execution context</param>
public abstract void ProcessRecord(JobExecutionContext context);
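A concrete subclass of this looping job must make ProcessRecord change each record so it drops out of GetQuery()'s result set; otherwise every round returns the same batch and the infinite-loop guard fires. A minimal sketch, assuming the GetQuery signature takes the caller id as a Guid and using a hypothetical new_processed marker attribute:

// Sketch: only fetch records not yet tagged, and tag them on success,
// so each successful ProcessRecord call shrinks the next round's result set.
// (Microsoft.Xrm.Sdk / Microsoft.Xrm.Sdk.Query types.)
public override QueryExpression GetQuery(Guid callerId) // parameter type assumed
{
    var query = new QueryExpression("account")
    {
        ColumnSet = new ColumnSet("name")
    };
    // Hypothetical marker attribute: skip records already processed
    query.Criteria.AddCondition("new_processed", ConditionOperator.NotEqual, true);
    return query;
}

public override void ProcessRecord(JobExecutionContext context)
{
    // Assumption: context exposes Proxy and Record members
    var update = new Entity("account", context.Record.Id);
    update["new_processed"] = true; // drop the record out of the next query round
    context.Proxy.Update(update);
}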
/// <summary>
/// Run the job
/// </summary>
/// <returns>True when the input file has been processed</returns>
public override bool Run()
{
    var jobName = GetName();
    var defaultFileSeparator = GetInputFileSeparator().First();
    List<string> lines = new List<string>();

    // Check if the pivot file exists
    if (File.Exists(GetPivotFilePath()))
    {
        var fileLines = File.ReadAllLines(GetPivotFilePath());
        lines = fileLines.ToList();
        Logger.LogInformation($"Retrieved {lines.Count} lines from file {GetPivotFilePath()}");
    }
    else
    {
        // Load the input file content
        var fileLines = File.ReadAllLines(GetInputFilePath());
        lines = fileLines.ToList();
        Logger.LogInformation($"Retrieved {lines.Count} lines from file {GetInputFilePath()}");

        // Create the pivot file that tracks progress and outcome
        var header = lines.First();
        var pivotLines = new List<string>()
        {
            string.Concat(header, defaultFileSeparator, PivotUniqueMarker, defaultFileSeparator, "RecordId", defaultFileSeparator, "Outcome", defaultFileSeparator, "Details")
        };
        File.WriteAllLines(GetPivotFilePath(), pivotLines, Encoding.UTF8);
    }

    var pivotFileWriter = new MultiThreadFileWriter(GetPivotFilePath());

    var processedItemCount = 0;
    var stopwatch = Stopwatch.StartNew();

    var threads = (this.OverrideThreadNumber.HasValue) ? this.OverrideThreadNumber : JobSettings.ThreadNumber;

    // Skip the header line
    var linesToProcess = lines.Skip(1);
    Parallel.ForEach(
        linesToProcess,
        new ParallelOptions() { MaxDegreeOfParallelism = threads.Value },
        () =>
        {
            var proxy = ProxiesPool.GetProxy();
            return new { Proxy = proxy };
        },
        (line, loopState, context) =>
        {
            var jobExecutionContext = new JobExecutionContext(context.Proxy);
            jobExecutionContext.PushMetrics(base.ContextProperties);

            // Increment progress index
            Interlocked.Increment(ref processedItemCount);

            var isPivotLine = line.Contains(PivotUniqueMarker);
            if (isPivotLine)
            {
                // TODO : Handle already processed pivot lines to replay errors
                return context;
            }

            Entity record = null;
            try
            {
                var lineData = line.Split(GetInputFileSeparator(), StringSplitOptions.RemoveEmptyEntries);

                // Retrieve the CRM record matching the current line
                record = SearchRecord(context.Proxy, lineData);
                jobExecutionContext.PushRecordToMetrics(record);

                ProcessRecord(jobExecutionContext, lineData);
                Logger.LogSuccess("Record processed with success!", jobExecutionContext.DumpMetrics());

                // Track progress and outcome
                var pivotLine = string.Concat(line, defaultFileSeparator, PivotUniqueMarker, defaultFileSeparator, record.Id.ToString() /* RecordId */, defaultFileSeparator, "OK" /* Outcome */, defaultFileSeparator, "Success" /* Details */);
                pivotFileWriter.Write(pivotLine);
            }
            catch (FaultException<OrganizationServiceFault> faultException)
            {
                var properties = jobExecutionContext.DumpMetrics().MergeWith(faultException.ExportProperties());
                Logger.LogFailure(faultException, properties);

                // Track progress and outcome
                var pivotLine = string.Concat(line, defaultFileSeparator, PivotUniqueMarker, defaultFileSeparator, record?.Id.ToString() /* RecordId */, defaultFileSeparator, "KO" /* Outcome */, defaultFileSeparator, faultException.Message /* Details */);
                pivotFileWriter.Write(pivotLine);
            }
            catch (Exception ex)
            {
                Logger.LogFailure(ex, jobExecutionContext.DumpMetrics());

                // Track progress and outcome
                var pivotLine = string.Concat(line, defaultFileSeparator, PivotUniqueMarker, defaultFileSeparator, record?.Id.ToString() /* RecordId */, defaultFileSeparator, "KO" /* Outcome */, defaultFileSeparator, ex.Message /* Details */);
                pivotFileWriter.Write(pivotLine);
            }
            return context;
        },
        (context) =>
        {
            context.Proxy.Dispose();
        });
    stopwatch.Stop();

    var speed = Utilities.GetSpeed(stopwatch.Elapsed.TotalMilliseconds, processedItemCount);
    Logger.LogInformation($"{processedItemCount} records processed in {stopwatch.Elapsed.TotalSeconds}s ({stopwatch.Elapsed:g}) [Speed = {speed}]!");
    return true;
}
/// <summary>
/// Apply record modification based on the current file line
/// </summary>
/// <param name="context">Job execution context</param>
/// <param name="lineData">Columns of the current input file line</param>
public virtual void ProcessRecord(JobExecutionContext context, string[] lineData) => throw new NotImplementedException();
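To make the file-driven variant concrete, a subclass handling input lines shaped like "contactid;emailaddress1" might look like this. A sketch only: the SearchRecord proxy parameter type, the column layout, and the Proxy member on JobExecutionContext are all assumptions.

// Sketch: column 0 holds the contact GUID, column 1 the new email address.
public override Entity SearchRecord(OrganizationServiceProxy proxy, string[] lineData) // proxy type assumed
{
    // Locate the CRM record referenced by the current line
    return proxy.Retrieve("contact", Guid.Parse(lineData[0]), new ColumnSet("emailaddress1"));
}

public override void ProcessRecord(JobExecutionContext context, string[] lineData)
{
    // Assumption: context exposes the proxy as Proxy
    var update = new Entity("contact", Guid.Parse(lineData[0]));
    update["emailaddress1"] = lineData[1];
    context.Proxy.Update(update);
}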